repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/lucene | 36,312 | lucene/spatial-extras/src/java/org/apache/lucene/spatial/prefix/tree/NumberRangePrefixTree.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.spatial.prefix.tree;
import java.text.ParseException;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.StringHelper;
import org.locationtech.spatial4j.context.SpatialContext;
import org.locationtech.spatial4j.context.SpatialContextFactory;
import org.locationtech.spatial4j.shape.Point;
import org.locationtech.spatial4j.shape.Rectangle;
import org.locationtech.spatial4j.shape.Shape;
import org.locationtech.spatial4j.shape.SpatialRelation;
import org.locationtech.spatial4j.shape.impl.RectangleImpl;
/**
* A SpatialPrefixTree for single-dimensional numbers and number ranges of fixed precision values
* (not floating point). Despite its name, the indexed values (and queries) need not actually be
* ranges, they can be unit instance/values.
*
* <p>Why might you use this instead of Lucene's built-in integer/long support? Here are some
* reasons with features based on code in this class, <em>or are possible based on this class but
* require a subclass to fully realize it</em>.
*
* <ul>
* <li>Index ranges, not just unit instances. This is especially useful when the requirement calls
* for a multi-valued range.
* <li>Instead of a fixed "precisionStep", this prefixTree can have a customizable number of child
* values for any prefix (up to 32768). This allows exact alignment of the prefix-tree with
* typical/expected values, which results in better performance. For example in a Date
* implementation, every month can get its own dedicated prefix, every day, etc., even though
* months vary in duration.
* <li>Arbitrary precision, like {@link java.math.BigDecimal}.
* <li>Standard Lucene integer/long indexing always indexes the full precision of those data types
* but this one is customizable.
* </ul>
*
* Unlike "normal" spatial components in this module, this special-purpose one only works with
* {@link Shape}s created by the methods on this class, not from any {@link
* org.locationtech.spatial4j.context.SpatialContext}.
*
* @see org.apache.lucene.spatial.prefix.NumberRangePrefixTreeStrategy
* @see <a href="https://issues.apache.org/jira/browse/LUCENE-5648">LUCENE-5648</a>
* @lucene.experimental
*/
public abstract class NumberRangePrefixTree extends SpatialPrefixTree {
//
// Dummy SpatialContext
//

// A bogus 1D "context": X spans all of the number line, Y is pinned to 0. Shapes produced by
// this tree never intermix with real 2D geometry, but the spatial4j Shape API requires a
// SpatialContext, so we fabricate a degenerate one shared by all instances.
private static final SpatialContext DUMMY_CTX;

static {
  SpatialContextFactory factory = new SpatialContextFactory();
  factory.geo = false; // plain Euclidean numbers, not lat/lon
  factory.worldBounds =
      new RectangleImpl(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, 0L, 0L, null);
  DUMMY_CTX = factory.newSpatialContext();
}
/**
 * Base interface for {@link Shape}s this prefix tree supports. It extends {@link Shape}
 * (Spatial4j) for compatibility with the spatial API even though it doesn't intermix with
 * conventional 2D shapes.
 *
 * @lucene.experimental
 */
public interface NRShape extends Shape, Cloneable {

  /**
   * A round-trippable representation: the result should be parseable by {@link
   * #parseShape(String)}.
   */
  @Override
  String toString();

  /**
   * Returns this shape rounded to the target level. If we are already more coarse than the level
   * then the shape is simply returned. The result may refer to internal state of the argument so
   * you may want to clone it.
   *
   * @param targetLevel the level to round down to; 0 means the world (full range)
   */
  NRShape roundToLevel(int targetLevel);
}
//
// Factory / Conversions / parsing relating to NRShapes
//

/**
 * Converts the value to a unit shape. Doesn't parse strings; see {@link #parseShape(String)} for
 * that. This is the reverse of {@link
 * #toObject(org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape)}.
 *
 * @param value a domain value of the subclass's supported type (e.g. a Calendar for a date tree)
 * @return the equivalent unit shape; not null
 */
public abstract UnitNRShape toUnitShape(Object value);
/**
 * Returns a shape that represents the continuous range between {@code startUnit} and {@code
 * endUnit}. It will be normalized, and so sometimes a {@link
 * org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape} will be returned,
 * other times a {@link
 * org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.SpanUnitsNRShape} will be.
 *
 * @throws IllegalArgumentException if the arguments are in the wrong order, or if either contains
 *     the other (yet they aren't equal).
 */
public NRShape toRangeShape(UnitNRShape startUnit, UnitNRShape endUnit) {
  // note: this normalization/optimization process is actually REQUIRED based on assumptions
  // elsewhere.
  // Normalize start & end
  startUnit =
      startUnit.getShapeAtLevel(
          truncateStartVals(startUnit, 0)); // chops off trailing min-vals (zeroes)
  endUnit = endUnit.getShapeAtLevel(truncateEndVals(endUnit, 0)); // chops off trailing max-vals
  // Optimize to just start or end if it's equivalent, e.g. April to April 1st is April 1st.
  int cmp = comparePrefix(startUnit, endUnit);
  if (cmp > 0) {
    throw new IllegalArgumentException("Wrong order: " + startUnit + " TO " + endUnit);
  }
  if (cmp == 0) { // one is a prefix of the other
    if (startUnit.getLevel() == endUnit.getLevel()) {
      // identical units; the range collapses to a single unit shape
      return startUnit;
    } else if (endUnit.getLevel() > startUnit.getLevel()) {
      // e.g. April to April 1st: if the end's extra levels are all min-vals down to the
      // start's level, the end alone already covers the whole span.
      if (truncateStartVals(endUnit, startUnit.getLevel()) == startUnit.getLevel()) {
        return endUnit;
      }
    } else { // minLV level > maxLV level
      // e.g. April 30 to April: symmetric case with trailing max-vals on the start.
      if (truncateEndVals(startUnit, endUnit.getLevel()) == endUnit.getLevel()) {
        return startUnit;
      }
    }
  }
  return new SpanUnitsNRShape(startUnit, endUnit);
}
/**
 * Walking from {@code lv.getLevel()} upward (toward the root), returns the first level whose
 * value is non-zero, i.e. the deepest level that carries information beyond "minimum". Never
 * looks past {@code endLevel}; returns {@code endLevel} if all examined values are 0.
 */
private int truncateStartVals(UnitNRShape lv, int endLevel) {
  int level = lv.getLevel();
  while (level > endLevel && lv.getValAtLevel(level) == 0) {
    level--; // trailing zero (min-val); keep trimming
  }
  return level;
}
/**
 * Mirror of {@link #truncateStartVals}: walking from {@code lv.getLevel()} upward, returns the
 * first level whose value is not the maximum for its parent cell (trims trailing max-vals).
 * Never looks past {@code endLevel}; returns {@code endLevel} if all examined values are maxed.
 */
private int truncateEndVals(UnitNRShape lv, int endLevel) {
  int level = lv.getLevel();
  while (level > endLevel
      && lv.getValAtLevel(level) == getNumSubCells(lv.getShapeAtLevel(level - 1)) - 1) {
    level--; // trailing max-val; keep trimming
  }
  return level;
}
/**
 * Converts a UnitNRShape shape to the corresponding type supported by this class, such as a
 * Calendar/BigDecimal. This is the reverse of {@link #toUnitShape(Object)}.
 *
 * @param shape a unit shape created by this tree; not null
 * @return the equivalent domain value; not null
 */
public abstract Object toObject(UnitNRShape shape);

/**
 * A string representation of the UnitNRShape that is parse-able by {@link
 * #parseUnitShape(String)}.
 */
protected abstract String toString(UnitNRShape lv);
/**
 * A debugging representation listing the raw per-level values, e.g. {@code [2024,4,1]}. Not
 * parseable; see {@link #toString(UnitNRShape)} for the domain-oriented form.
 *
 * @param lv the unit shape; a level-0 (world) shape yields {@code []}
 */
protected static String toStringUnitRaw(UnitNRShape lv) {
  StringBuilder buf = new StringBuilder(100);
  buf.append('[');
  for (int level = 1; level <= lv.getLevel(); level++) {
    buf.append(lv.getValAtLevel(level)).append(',');
  }
  // Only chop the trailing ',' if the loop appended anything; the original unconditional
  // setLength chopped the '[' for a level-0 shape, yielding "]" instead of "[]".
  if (buf.length() > 1) {
    buf.setLength(buf.length() - 1);
  }
  buf.append(']');
  return buf.toString();
}
/**
 * Detects a range pattern and parses it, otherwise it's parsed as one shape via {@link
 * #parseUnitShape(String)}. The range pattern looks like this BNF:
 *
 * <pre>
 * '[' + parseShapeLV + ' TO ' + parseShapeLV + ']'
 * </pre>
 *
 * It's the same thing as the toString() of the range shape, notwithstanding range optimization.
 *
 * @param str not null or empty
 * @return not null
 * @throws java.text.ParseException If there is a problem
 */
public NRShape parseShape(String str) throws ParseException {
  if (str == null || str.isEmpty()) throw new IllegalArgumentException("str is null or blank");
  final char first = str.charAt(0);
  if (first == '{') {
    throw new ParseException("Exclusive ranges not supported; got " + str, 0);
  }
  if (first != '[') {
    // no range syntax at all; a single unit value
    return parseUnitShape(str);
  }
  if (str.charAt(str.length() - 1) != ']') {
    throw new ParseException("If starts with [ must end with ]; got " + str, str.length() - 1);
  }
  final int sep = str.indexOf(" TO ");
  if (sep < 0) {
    throw new ParseException("If starts with [ must contain ' TO '; got " + str, -1);
  }
  // parse the two endpoints (left first, as callers may rely on parse order for errors)
  final UnitNRShape start = parseUnitShape(str.substring(1, sep));
  final UnitNRShape end = parseUnitShape(str.substring(sep + " TO ".length(), str.length() - 1));
  return toRangeShape(start, end);
}
/**
 * Parse a String to a UnitNRShape. "*" should be the full-range (level 0 shape).
 *
 * @param str not null or empty
 * @throws ParseException if the string is not in the subclass's expected format
 */
protected abstract UnitNRShape parseUnitShape(String str) throws ParseException;
//
// UnitNRShape
//

/**
 * A unit value Shape implemented as a stack of numbers, one for each level in the prefix tree. It
 * directly corresponds to a {@link Cell}. Spatially speaking, it's analogous to a Point but 1D
 * and has some precision width.
 *
 * @lucene.experimental
 */
public interface UnitNRShape extends NRShape, Comparable<UnitNRShape> {
  // note: formerly known as LevelledValue; thus some variables still use 'lv'

  /** Get the prefix tree level, the higher the more precise. 0 means the world (universe). */
  int getLevel();

  /**
   * Gets the value at the specified level of this unit. {@code level} must be >= 0 and <=
   * getLevel().
   */
  int getValAtLevel(int level);

  /** Gets an ancestor at the specified level. It shares state, so you may want to clone() it. */
  UnitNRShape getShapeAtLevel(int level);

  /** Like {@link NRShape#roundToLevel(int)} but covariantly returns a unit shape. */
  @Override
  UnitNRShape roundToLevel(int targetLevel);

  /** Deep clone; the result shares no mutable state with this instance. */
  UnitNRShape clone();
}
/**
 * Compares a to b, returning less than 0, 0, or greater than 0, if a is less than, equal to, or
 * greater than b, respectively, up to their common prefix (i.e. only min(a.levels,b.levels) are
 * compared). Callers must only rely on the sign of the result, not its magnitude.
 *
 * @lucene.internal
 */
protected static int comparePrefix(UnitNRShape a, UnitNRShape b) {
  int minLevel = Math.min(a.getLevel(), b.getLevel());
  for (int level = 1; level <= minLevel; level++) {
    // Integer.compare instead of subtraction: overflow-proof regardless of value range, and
    // every caller in this file only inspects the sign.
    int cmp = Integer.compare(a.getValAtLevel(level), b.getValAtLevel(level));
    if (cmp != 0) return cmp;
  }
  return 0; // one is a prefix of (or equal to) the other
}
//
// SpanUnitsNRShape
//

/**
 * A range Shape; based on a pair of {@link
 * org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape}. Spatially speaking,
 * it's analogous to a Rectangle but 1D. It might have been named with Range in the name but it
 * may be confusing since even the {@link
 * org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape} is in some sense a
 * range.
 *
 * @lucene.experimental
 */
public class SpanUnitsNRShape implements NRShape {

  // inclusive endpoints; already normalized by toRangeShape() (no trailing min/max vals)
  private final UnitNRShape minLV, maxLV;
  private final int lastLevelInCommon; // computed; not part of identity

  /**
   * Don't call directly; see {@link
   * #toRangeShape(org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape,
   * org.apache.lucene.spatial.prefix.tree.NumberRangePrefixTree.UnitNRShape)}.
   */
  private SpanUnitsNRShape(UnitNRShape minLV, UnitNRShape maxLV) {
    this.minLV = minLV;
    this.maxLV = maxLV;
    // calc lastLevelInCommon: descend while both endpoints agree on the value at each level
    int level = 1;
    for (; level <= minLV.getLevel() && level <= maxLV.getLevel(); level++) {
      if (minLV.getValAtLevel(level) != maxLV.getValAtLevel(level)) break;
    }
    lastLevelInCommon = level - 1;
  }

  @Override
  public SpatialContext getContext() {
    return DUMMY_CTX;
  }

  /** The inclusive lower endpoint. */
  public UnitNRShape getMinUnit() {
    return minLV;
  }

  /** The inclusive upper endpoint. */
  public UnitNRShape getMaxUnit() {
    return maxLV;
  }

  /** How many levels are in common between minUnit and maxUnit, not including level 0. */
  private int getLevelsInCommon() {
    return lastLevelInCommon;
  }

  @Override
  public NRShape roundToLevel(int targetLevel) {
    // round each endpoint, then re-normalize (may collapse to a single UnitNRShape)
    return toRangeShape(minLV.roundToLevel(targetLevel), maxLV.roundToLevel(targetLevel));
  }

  @Override
  public SpatialRelation relate(Shape shape) {
    // if (shape instanceof UnitNRShape)
    // return relate((UnitNRShape)shape);
    if (shape instanceof SpanUnitsNRShape) return relate((SpanUnitsNRShape) shape);
    return shape.relate(this).transpose(); // probably a UnitNRShape
  }

  /**
   * Relates this span ("int" = internal) to another span ("ext" = external). Comparisons are
   * prefix-based; when a prefix compare ties (returns 0) the levels disambiguate, since a
   * shallower unit covers a wider range than a deeper one sharing its prefix.
   */
  public SpatialRelation relate(SpanUnitsNRShape ext) {
    // This logic somewhat mirrors RectangleImpl.relate_range()
    int extMin_intMax = comparePrefix(ext.getMinUnit(), getMaxUnit());
    if (extMin_intMax > 0) return SpatialRelation.DISJOINT; // ext starts after we end
    int extMax_intMin = comparePrefix(ext.getMaxUnit(), getMinUnit());
    if (extMax_intMin < 0) return SpatialRelation.DISJOINT; // ext ends before we start
    int extMin_intMin = comparePrefix(ext.getMinUnit(), getMinUnit());
    int extMax_intMax = comparePrefix(ext.getMaxUnit(), getMaxUnit());
    // CONTAINS: ext lies entirely within [intMin, intMax]; on ties, a deeper (>=) ext
    // endpoint is narrower and thus still inside.
    if ((extMin_intMin > 0
        || extMin_intMin == 0 && ext.getMinUnit().getLevel() >= getMinUnit().getLevel())
        && (extMax_intMax < 0
        || extMax_intMax == 0 && ext.getMaxUnit().getLevel() >= getMaxUnit().getLevel()))
      return SpatialRelation.CONTAINS;
    // WITHIN: the symmetric case; shallower (<=) ext endpoints are wider and enclose ours.
    if ((extMin_intMin < 0
        || extMin_intMin == 0 && ext.getMinUnit().getLevel() <= getMinUnit().getLevel())
        && (extMax_intMax > 0
        || extMax_intMax == 0 && ext.getMaxUnit().getLevel() <= getMaxUnit().getLevel()))
      return SpatialRelation.WITHIN;
    return SpatialRelation.INTERSECTS;
  }

  @Override
  public Rectangle getBoundingBox() {
    throw new UnsupportedOperationException(); // not meaningful for 1D number ranges
  }

  @Override
  public boolean hasArea() {
    return true;
  }

  @Override
  public double getArea(SpatialContext spatialContext) {
    throw new UnsupportedOperationException(); // not meaningful for 1D number ranges
  }

  @Override
  public Point getCenter() {
    throw new UnsupportedOperationException(); // not meaningful for 1D number ranges
  }

  @Override
  public Shape getBuffered(double v, SpatialContext spatialContext) {
    throw new UnsupportedOperationException(); // not meaningful for 1D number ranges
  }

  @Override
  public boolean isEmpty() {
    return false;
  }

  /** A deep clone. */
  @Override
  public SpanUnitsNRShape clone() {
    return new SpanUnitsNRShape(minLV.clone(), maxLV.clone());
  }

  @Override
  public String toString() {
    return "["
        + NumberRangePrefixTree.this.toString(minLV)
        + " TO "
        + NumberRangePrefixTree.this.toString(maxLV)
        + "]";
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    SpanUnitsNRShape spanShape = (SpanUnitsNRShape) o;
    // identity is the endpoint pair only; lastLevelInCommon is derived
    if (!maxLV.equals(spanShape.maxLV)) return false;
    if (!minLV.equals(spanShape.minLV)) return false;
    return true;
  }

  @Override
  public int hashCode() {
    int result = minLV.hashCode();
    result = 31 * result + maxLV.hashCode();
    return result;
  }
} // class SpanUnitsNRShape
//
// NumberRangePrefixTree
//

// fan-out (number of child values) per level; index 0 is the fan-out beneath the world cell
protected final int[] maxSubCellsByLevel;
// encoded token length (in bytes, excluding the leaf byte) for a cell at each level
protected final int[] termLenByLevel;
// reverse of termLenByLevel: maps a token length back to its level (-1 for impossible lengths)
protected final int[] levelByTermLen;
protected final int maxTermLen; // how long could cell.getToken... (that is a leaf) possibly be?

/**
 * @param maxSubCellsByLevel per-level fan-out; length determines maxLevels. Each entry must be
 *     in [2, 32767]: a cell number is encoded in 1 byte when the fan-out is < 256, else 2 bytes
 *     (15 bits), and 0 is reserved as the leaf marker.
 */
protected NumberRangePrefixTree(int[] maxSubCellsByLevel) {
  super(DUMMY_CTX, maxSubCellsByLevel.length);
  this.maxSubCellsByLevel = maxSubCellsByLevel;
  // Fill termLenByLevel: cumulative byte length of the encoded cell-number stack
  this.termLenByLevel = new int[maxLevels + 1];
  termLenByLevel[0] = 0;
  final int MAX_STATES = 1 << 15; // 1 bit less than 2 bytes
  for (int level = 1; level <= maxLevels; level++) {
    final int states = maxSubCellsByLevel[level - 1];
    if (states >= MAX_STATES || states <= 1) {
      throw new IllegalArgumentException(
          "Max states is " + MAX_STATES + ", given " + states + " at level " + level);
    }
    boolean twoBytes = states >= 256; // wide levels need a 2-byte cell number
    termLenByLevel[level] = termLenByLevel[level - 1] + (twoBytes ? 2 : 1);
  }
  maxTermLen = termLenByLevel[maxLevels] + 1; // + 1 for leaf byte
  // Fill levelByTermLen (the inverse lookup used by readCell)
  levelByTermLen = new int[maxTermLen];
  levelByTermLen[0] = 0;
  for (int level = 1; level < termLenByLevel.length; level++) {
    int termLen = termLenByLevel[level];
    int prevTermLen = termLenByLevel[level - 1];
    if (termLen - prevTermLen == 2) { // 2 byte delta
      // if the term doesn't completely cover this cell then it must be a leaf of the prior.
      levelByTermLen[termLen - 1] = -1; // won't be used; otherwise erroneous
      levelByTermLen[termLen] = level;
    } else { // 1 byte delta
      assert termLen - prevTermLen == 1;
      levelByTermLen[termLen] = level;
    }
  }
}
/** Returns the concrete subclass's simple name; adequate since trees carry no instance config worth printing here. */
@Override
public String toString() {
  return getClass().getSimpleName();
}
/**
 * Always answers {@code maxLevels}: this tree does no precision approximation, so every query
 * uses full precision. The "distance" notion doesn't map cleanly onto number ranges.
 */
@Override
public int getLevelForDistance(double dist) {
  // note: it might be useful to compute which level has a raw width (counted in
  // bottom units, e.g. milliseconds), that covers the provided dist in those units?
  //
  // thus always use full precision. We don't do approximations in this tree/strategy.
  return maxLevels;
  // throw new UnsupportedOperationException("Not applicable.");
}

/** Unsupported; see {@link #getLevelForDistance(double)} -- distances aren't meaningful here. */
@Override
public double getDistanceForLevel(int level) {
  // note: we could compute this... should we?
  throw new UnsupportedOperationException("Not applicable.");
}
/**
 * Builds a unit shape from a stack of raw per-level values. {@code valStack[i]} becomes the
 * cell number at level {@code i + 1}; {@code len} is the target level. Returns the cell at the
 * deepest level (it references the whole freshly-allocated stack).
 */
protected UnitNRShape toShape(int[] valStack, int len) {
  final NRCell[] cellStack = newCellStack(len);
  for (int level = 1; level <= len; level++) {
    cellStack[level].resetCellWithCellNum(valStack[level - 1]);
  }
  return cellStack[len];
}
/** Returns level 0 of a freshly allocated cell stack (the whole universe). */
@Override
public Cell getWorldCell() {
  return newCellStack(maxLevels)[0];
}
/**
 * Allocates a stack of {@code levels + 1} cells (index == level), all sharing one BytesRef
 * term buffer sized for the longest possible leaf token. Each cell holds a back-reference to
 * the whole stack so it can navigate to ancestors/descendants.
 */
protected NRCell[] newCellStack(int levels) {
  final BytesRef sharedTerm = new BytesRef(maxTermLen);
  final NRCell[] stack = new NRCell[levels + 1];
  int level = 0;
  while (level <= levels) {
    stack[level] = new NRCell(stack, sharedTerm, level);
    level++;
  }
  return stack;
}
/**
 * Decodes a cell from its indexed term bytes, re-using {@code scratch}'s cell stack when
 * provided. The per-level cell numbers are NOT decoded here (lazy; see ensureDecoded()) --
 * only the level (via token length) and the leaf flag.
 */
@Override
public Cell readCell(BytesRef term, Cell scratch) {
  if (scratch == null) scratch = getWorldCell();
  // We decode level #, leaf boolean, and populate bytes by reference. We don't decode the stack.
  // reverse lookup term length to the level and hence the cell
  NRCell[] cellsByLevel = ((NRCell) scratch).cellsByLevel;
  // a trailing 0 byte marks a leaf (0 never appears in an encoded cell number)
  boolean isLeaf = term.bytes[term.offset + term.length - 1] == 0;
  int lenNoLeaf = isLeaf ? term.length - 1 : term.length;
  NRCell result = cellsByLevel[levelByTermLen[lenNoLeaf]];
  if (cellsByLevel[0].termBuf == null)
    cellsByLevel[0].termBuf = result.term.bytes; // a kluge; see cell.ensureOwnTermBytes()
  // point the shared term at the caller's bytes by reference (no copy)
  result.term.bytes = term.bytes;
  result.term.offset = term.offset;
  result.term.length = lenNoLeaf; // technically this isn't used but may help debugging
  result.reset();
  if (isLeaf) result.setLeaf();
  result.cellNumber = -1; // lazy decode flag
  return result;
}
/** Returns the number of sub-cells beneath the given UnitNRShape (the fan-out at its level). */
public int getNumSubCells(UnitNRShape lv) {
  return maxSubCellsByLevel[lv.getLevel()];
}
//
// NRCell
//
/**
 * Most of the PrefixTree implementation is in this one class, which is both the Cell, the
 * CellIterator, and the Shape to reduce object allocation. It's implemented as a re-used
 * array/stack of Cells at adjacent levels, that all have a reference back to the cell array to
 * traverse. They also share a common BytesRef for the term.
 *
 * @lucene.internal
 */
protected class NRCell extends CellIterator implements Cell, UnitNRShape {

  // Shared: (TODO put this in a new class)
  final NRCell[] cellsByLevel; // the stack this cell belongs to; index == level
  final BytesRef term; // AKA the token; shared by all cells in the stack

  byte[] termBuf; // see ensureOwnTermBytes(), only for cell0

  // Cell state...
  final int cellLevel; // assert levelStack[cellLevel] == this
  // relative to parent cell. It's unused for level 0. Starts at 0. -1 means "not yet
  // decoded from term bytes" (see ensureDecoded()).
  int cellNumber;
  SpatialRelation cellShapeRel; // relation to the current query shape, if computed
  boolean cellIsLeaf;
  // CellIterator state is defined further below

  NRCell(NRCell[] cellsByLevel, BytesRef term, int cellLevel) {
    this.cellsByLevel = cellsByLevel;
    this.term = term;
    this.cellLevel = cellLevel;
    this.cellNumber = cellLevel == 0 ? 0 : -1; // world cell is trivially "decoded"
    this.cellIsLeaf = false;
    assert cellsByLevel[cellLevel] == null; // each slot is filled exactly once
  }
/**
 * Ensure we own term.bytes so that it's safe to modify. We detect via a kluge in which
 * cellsByLevel[0].termBuf is non-null, which is a pre-allocated for use to replace term.bytes.
 */
void ensureOwnTermBytes() {
  NRCell cell0 = cellsByLevel[0];
  if (cell0.termBuf == null) return; // we already own the bytes
  // copy the externally-owned bytes into our pre-allocated buffer and switch over
  System.arraycopy(term.bytes, term.offset, cell0.termBuf, 0, term.length);
  term.bytes = cell0.termBuf;
  term.offset = 0;
  cell0.termBuf = null;
}

/** Clears per-query state (leaf flag and cached relation); cellNumber is untouched. */
private void reset() {
  this.cellIsLeaf = false;
  this.cellShapeRel = null;
}

/**
 * Resets this cell to the given cell number and re-encodes the shared term bytes for it.
 * Encoding adds 1 to each byte so that 0 remains reserved as the leaf marker; wide levels
 * use 2 bytes (high 8 bits, then low 7 bits + 1).
 */
private void resetCellWithCellNum(int cellNumber) {
  reset();
  // update bytes
  // note: see lazyInitCellNumsFromBytes() for the reverse
  if (cellNumber >= 0) { // valid
    ensureOwnTermBytes();
    int termLen = termLenByLevel[getLevel()];
    boolean twoBytes = (termLen - termLenByLevel[getLevel() - 1]) > 1;
    if (twoBytes) {
      // right 7 bits, plus 1 (may overflow to 8th bit which is okay)
      term.bytes[termLen - 2] = (byte) (cellNumber >> 7);
      term.bytes[termLen - 1] = (byte) ((cellNumber & 0x7F) + 1);
    } else {
      term.bytes[termLen - 1] = (byte) (cellNumber + 1);
    }
    assert term.bytes[termLen - 1] != 0; // never collides with the leaf marker
    term.length = termLen;
  }
  this.cellNumber = cellNumber;
}
/**
 * Lazily decodes the per-level cell numbers from the term bytes (set by readCell()) into
 * every cell of the stack up to this level. No-op if already decoded (cellNumber >= 0).
 */
private void ensureDecoded() {
  if (cellNumber >= 0) return;
  // Decode cell numbers from bytes. This is the inverse of resetCellWithCellNum().
  for (int level = 1; level <= getLevel(); level++) {
    NRCell cell = cellsByLevel[level];
    int termLen = termLenByLevel[level];
    boolean twoBytes = (termLen - termLenByLevel[level - 1]) > 1;
    if (twoBytes) {
      // high byte carries bits 7+, low byte carries the low 7 bits offset by +1
      int byteH = (term.bytes[term.offset + termLen - 2] & 0xFF);
      int byteL = (term.bytes[term.offset + termLen - 1] & 0xFF);
      assert byteL - 1 < (1 << 7);
      cell.cellNumber = (byteH << 7) + (byteL - 1);
      assert cell.cellNumber < 1 << 15;
    } else {
      cell.cellNumber = (term.bytes[term.offset + termLen - 1] & 0xFF) - 1;
      assert cell.cellNumber < 255;
    }
    cell.assertDecoded();
  }
}

/** Asserts this cell's number has been decoded; call before reading cellNumber-derived state. */
private void assertDecoded() {
  assert cellNumber >= 0 : "Illegal state; ensureDecoded() wasn't called";
}
@Override // for Cell & for UnitNRShape
public int getLevel() {
  return cellLevel;
}

/** The cached relation to the current query shape, or null if not computed. */
@Override
public SpatialRelation getShapeRel() {
  return cellShapeRel;
}

@Override
public void setShapeRel(SpatialRelation rel) {
  cellShapeRel = rel;
}

@Override
public boolean isLeaf() {
  return cellIsLeaf;
}

@Override
public void setLeaf() {
  cellIsLeaf = true;
}

/** Returns this cell viewed as a shape; forces decoding of the cell-number stack first. */
@Override
public UnitNRShape getShape() {
  ensureDecoded();
  return this;
}
/**
 * Exposes this cell's token bytes WITHOUT the trailing leaf marker. The result aliases the
 * shared term buffer (no copy); only the length differs from the full term.
 */
@Override
public BytesRef getTokenBytesNoLeaf(BytesRef result) {
  if (result == null) result = new BytesRef();
  result.bytes = term.bytes;
  result.offset = term.offset;
  result.length = termLenByLevel[cellLevel];
  assert result.length <= term.length;
  return result;
}

/** Like getTokenBytesNoLeaf but appends the 0 leaf-marker byte when this cell is a leaf. */
@Override
public BytesRef getTokenBytesWithLeaf(BytesRef result) {
  ensureOwnTermBytes(); // normally shouldn't do anything; we're about to write into the buffer
  result = getTokenBytesNoLeaf(result);
  if (isLeaf()) {
    result.bytes[result.length++] = 0;
  }
  return result;
}
/**
 * True if this cell's token is a byte-prefix of {@code c}'s token. Temporarily truncates both
 * shared BytesRefs to their no-leaf lengths, then restores them.
 */
@Override
public boolean isPrefixOf(Cell c) {
  NRCell otherCell = (NRCell) c;
  assert term != otherCell.term; // must be from different cell stacks
  // trick to re-use bytesref; provided that we re-instate it
  int myLastLen = term.length;
  term.length = termLenByLevel[getLevel()];
  int otherLastLen = otherCell.term.length;
  otherCell.term.length = termLenByLevel[otherCell.getLevel()];
  boolean answer = StringHelper.startsWith(otherCell.term, term);
  term.length = myLastLen;
  otherCell.term.length = otherLastLen;
  return answer;
}

/**
 * Byte-wise comparison of the two cells' tokens, ignoring any leaf marker. Uses the same
 * truncate-then-restore trick as isPrefixOf().
 */
@Override
public int compareToNoLeaf(Cell fromCell) {
  final NRCell nrCell = (NRCell) fromCell;
  assert term != nrCell.term; // must be from different cell stacks
  // trick to re-use bytesref; provided that we re-instate it
  int myLastLen = term.length;
  int otherLastLen = nrCell.term.length;
  term.length = termLenByLevel[getLevel()];
  nrCell.term.length = termLenByLevel[nrCell.getLevel()];
  int answer = term.compareTo(nrCell.term);
  term.length = myLastLen;
  nrCell.term.length = otherLastLen;
  return answer;
}

/**
 * Returns the next-level cell of this stack, initialized to iterate this cell's children that
 * match {@code shapeFilter}. Note the returned object is the (reused) child cell itself.
 */
@Override
public CellIterator getNextLevelCells(Shape shapeFilter) {
  ensureDecoded();
  NRCell subCell = cellsByLevel[cellLevel + 1];
  subCell.initIter(shapeFilter);
  return subCell;
}
// ----------- CellIterator
//
// Each cell iterates its own level's siblings, bounded by [iterFirstCellNumber,
// iterLastCellNumber]. The *IsIntersects flags mark whether the corresponding boundary cell
// only partially overlaps the filter (and thus must be descended into rather than treated as
// wholly WITHIN).

Shape iterFilter; // UnitNRShape or NRShape
boolean iterFirstIsIntersects;
boolean iterLastIsIntersects;
int iterFirstCellNumber;
int iterLastCellNumber;

/** Computes the iteration bounds at this level for the given filter shape. */
private void initIter(Shape filter) {
  cellNumber = -1;
  if (filter instanceof UnitNRShape && ((UnitNRShape) filter).getLevel() == 0)
    filter = null; // world means everything -- no filter
  iterFilter = filter;
  NRCell parent = getShapeAtLevel(getLevel() - 1);
  // Initialize iter* members.
  // no filter means all subcells
  if (filter == null) {
    iterFirstCellNumber = 0;
    iterFirstIsIntersects = false;
    iterLastCellNumber = getNumSubCells(parent) - 1;
    iterLastIsIntersects = false;
    return;
  }
  // normalize the filter into an endpoint pair (a unit shape is its own min & max)
  final UnitNRShape minLV;
  final UnitNRShape maxLV;
  final int lastLevelInCommon; // between minLV & maxLV
  if (filter instanceof SpanUnitsNRShape) {
    SpanUnitsNRShape spanShape = (SpanUnitsNRShape) iterFilter;
    minLV = spanShape.getMinUnit();
    maxLV = spanShape.getMaxUnit();
    lastLevelInCommon = spanShape.getLevelsInCommon();
  } else {
    minLV = (UnitNRShape) iterFilter;
    maxLV = minLV;
    lastLevelInCommon = minLV.getLevel();
  }
  // fast path optimization that is usually true, but never first level: derive our bounds
  // from the parent's iteration state instead of re-comparing prefixes
  if (iterFilter == parent.iterFilter
      && (getLevel() <= lastLevelInCommon
      || parent.iterFirstCellNumber != parent.iterLastCellNumber)) {
    // TODO benchmark if this optimization pays off. We avoid two comparePrefixLV calls.
    if (parent.iterFirstIsIntersects
        && parent.cellNumber == parent.iterFirstCellNumber
        && minLV.getLevel() >= getLevel()) {
      // we're inside the partially-covered start boundary; clip the start
      iterFirstCellNumber = minLV.getValAtLevel(getLevel());
      iterFirstIsIntersects = (minLV.getLevel() > getLevel());
    } else {
      iterFirstCellNumber = 0;
      iterFirstIsIntersects = false;
    }
    if (parent.iterLastIsIntersects
        && parent.cellNumber == parent.iterLastCellNumber
        && maxLV.getLevel() >= getLevel()) {
      // we're inside the partially-covered end boundary; clip the end
      iterLastCellNumber = maxLV.getValAtLevel(getLevel());
      iterLastIsIntersects = (maxLV.getLevel() > getLevel());
    } else {
      iterLastCellNumber = getNumSubCells(parent) - 1;
      iterLastIsIntersects = false;
    }
    if (iterFirstCellNumber == iterLastCellNumber) {
      // a single boundary cell: either flag implies the other
      if (iterLastIsIntersects) iterFirstIsIntersects = true;
      else if (iterFirstIsIntersects) iterLastIsIntersects = true;
    }
    return;
  }
  // not common to get here, except for level 1 which always happens
  int startCmp = comparePrefix(minLV, parent);
  if (startCmp > 0) { // start comes after this cell
    iterFirstCellNumber = 0;
    iterFirstIsIntersects = false;
    iterLastCellNumber = -1; // so ends early (no cells)
    iterLastIsIntersects = false;
    return;
  }
  int endCmp = comparePrefix(maxLV, parent); // compare to end cell
  if (endCmp < 0) { // end comes before this cell
    iterFirstCellNumber = 0;
    iterFirstIsIntersects = false;
    iterLastCellNumber = -1; // so ends early (no cells)
    iterLastIsIntersects = false;
    return;
  }
  if (startCmp < 0 || minLV.getLevel() < getLevel()) {
    // start comes before... every child from 0 is fully covered on the start side
    iterFirstCellNumber = 0;
    iterFirstIsIntersects = false;
  } else {
    iterFirstCellNumber = minLV.getValAtLevel(getLevel());
    iterFirstIsIntersects = (minLV.getLevel() > getLevel());
  }
  if (endCmp > 0 || maxLV.getLevel() < getLevel()) {
    // end comes after... every child up to the last is fully covered on the end side
    iterLastCellNumber = getNumSubCells(parent) - 1;
    iterLastIsIntersects = false;
  } else {
    iterLastCellNumber = maxLV.getValAtLevel(getLevel());
    iterLastIsIntersects = (maxLV.getLevel() > getLevel());
  }
  if (iterFirstCellNumber == iterLastCellNumber) {
    // a single boundary cell: either flag implies the other
    if (iterLastIsIntersects) iterFirstIsIntersects = true;
    else if (iterFirstIsIntersects) iterLastIsIntersects = true;
  }
}
/**
 * Advances this (reused) cell to the next cell number in the iteration bounds, setting the
 * leaf flag and shape relation as a side effect. Returns false when past iterLastCellNumber.
 */
@Override
public boolean hasNext() {
  thisCell = null;
  if (nextCell != null) // calling hasNext twice in a row
    return true;
  if (cellNumber >= iterLastCellNumber) return false;
  // jump to the first cell on the initial call (cellNumber starts at -1), else advance by one
  resetCellWithCellNum(cellNumber < iterFirstCellNumber ? iterFirstCellNumber : cellNumber + 1);
  // only the partially-covered boundary cells have children worth visiting
  boolean hasChildren =
      (cellNumber == iterFirstCellNumber && iterFirstIsIntersects)
          || (cellNumber == iterLastCellNumber && iterLastIsIntersects);
  if (!hasChildren) {
    setLeaf(); // fully covered by the filter: index as a leaf, don't descend
    setShapeRel(SpatialRelation.WITHIN);
  } else if (iterFirstCellNumber == iterLastCellNumber) {
    setShapeRel(SpatialRelation.CONTAINS);
  } else {
    setShapeRel(SpatialRelation.INTERSECTS);
  }
  nextCell = this;
  return true;
}

// TODO override nextFrom to be more efficient
// ----------- UnitNRShape

/** The decoded cell number at the given ancestor level; requires prior ensureDecoded(). */
@Override
public int getValAtLevel(int level) {
  final int result = cellsByLevel[level].cellNumber;
  assert result >= 0; // initialized (decoded)
  return result;
}

/** The ancestor cell at {@code level}; shares state with this stack (not a copy). */
@Override
public NRCell getShapeAtLevel(int level) {
  assert level <= cellLevel;
  return cellsByLevel[level];
}
/** Rounds down to an ancestor at {@code targetLevel}; a no-op when already at or above it. */
@Override
public UnitNRShape roundToLevel(int targetLevel) {
  return getLevel() <= targetLevel ? this : getShapeAtLevel(targetLevel);
}
/** Dispatches to the typed relate() overloads; reuses the cached relation for the query filter. */
@Override
public SpatialRelation relate(Shape shape) {
  assertDecoded();
  if (shape == iterFilter && cellShapeRel != null) return cellShapeRel;
  if (shape instanceof UnitNRShape) return relate((UnitNRShape) shape);
  if (shape instanceof SpanUnitsNRShape) return relate((SpanUnitsNRShape) shape);
  return shape.relate(this).transpose();
}

/** Relates this cell to a single unit: either disjoint, or one is an ancestor of the other. */
public SpatialRelation relate(UnitNRShape lv) {
  assertDecoded();
  int cmp = comparePrefix(this, lv);
  if (cmp != 0) return SpatialRelation.DISJOINT;
  if (getLevel() > lv.getLevel()) return SpatialRelation.WITHIN;
  return SpatialRelation.CONTAINS; // or equals
  // no INTERSECTS; that won't happen.
}

/** Relates this cell to a span; the tricky part is detecting edge-aligned containment. */
public SpatialRelation relate(SpanUnitsNRShape spanShape) {
  assertDecoded();
  int startCmp = comparePrefix(spanShape.getMinUnit(), this);
  if (startCmp > 0) { // start comes after this cell
    return SpatialRelation.DISJOINT;
  }
  int endCmp = comparePrefix(spanShape.getMaxUnit(), this);
  if (endCmp < 0) { // end comes before this cell
    return SpatialRelation.DISJOINT;
  }
  int nrMinLevel = spanShape.getMinUnit().getLevel();
  int nrMaxLevel = spanShape.getMaxUnit().getLevel();
  if ((startCmp < 0 || startCmp == 0 && nrMinLevel <= getLevel())
      && (endCmp > 0 || endCmp == 0 && nrMaxLevel <= getLevel()))
    return SpatialRelation.WITHIN; // or equals
  // At this point it's Contains or Within.
  if (startCmp != 0 || endCmp != 0) return SpatialRelation.INTERSECTS;
  // if min or max Level is less, it might be on the equivalent edge.
  // An endpoint shallower than this cell only CONTAINS it if this cell sits exactly on the
  // corresponding edge: all deeper start-vals must be min (0), all deeper end-vals max.
  for (; nrMinLevel < getLevel(); nrMinLevel++) {
    if (getValAtLevel(nrMinLevel + 1) != 0) return SpatialRelation.INTERSECTS;
  }
  for (; nrMaxLevel < getLevel(); nrMaxLevel++) {
    if (getValAtLevel(nrMaxLevel + 1) != getNumSubCells(getShapeAtLevel(nrMaxLevel)) - 1)
      return SpatialRelation.INTERSECTS;
  }
  return SpatialRelation.CONTAINS;
}
/**
 * Deep clone via re-reading our own token into a fresh cell stack, then detaching it from the
 * shared byte buffer. Leaf state is deliberately NOT carried over.
 */
@Override
public UnitNRShape clone() {
  // no leaf distinction; this is purely based on UnitNRShape
  NRCell cell = (NRCell) readCell(getTokenBytesNoLeaf(null), null);
  cell.ensureOwnTermBytes();
  return cell.getShape();
}

/** Orders by per-level values; on a shared prefix the shallower (coarser) unit sorts first. */
@Override
public int compareTo(UnitNRShape o) {
  assertDecoded();
  // no leaf distinction; this is purely based on UnitNRShape
  int cmp = comparePrefix(this, o);
  if (cmp != 0) {
    return cmp;
  } else {
    return getLevel() - o.getLevel();
  }
}
// 2D-geometry Shape operations below are not meaningful for 1D number cells.

@Override
public Rectangle getBoundingBox() {
  throw new UnsupportedOperationException();
}

@Override
public boolean hasArea() {
  return true;
}

@Override
public double getArea(SpatialContext ctx) {
  throw new UnsupportedOperationException();
}

@Override
public Point getCenter() {
  throw new UnsupportedOperationException();
}

@Override
public Shape getBuffered(double distance, SpatialContext ctx) {
  throw new UnsupportedOperationException();
}

@Override
public boolean isEmpty() {
  return false;
}
// ------- Object

/**
 * Equality is by level and token bytes (no leaf distinction). Uses the truncate-then-restore
 * trick on the shared BytesRefs, so both terms are temporarily mutated then reinstated.
 */
@Override
public boolean equals(Object obj) {
  if (!(obj instanceof NRCell)) {
    return false;
  }
  if (this == obj) return true;
  NRCell nrCell = (NRCell) obj;
  assert term != nrCell.term; // must be from different cell stacks
  if (getLevel() != nrCell.getLevel()) return false;
  // trick to re-use bytesref; provided that we re-instate it
  int myLastLen = term.length;
  int otherLastLen = nrCell.term.length;
  boolean answer = getTokenBytesNoLeaf(term).equals(nrCell.getTokenBytesNoLeaf(nrCell.term));
  term.length = myLastLen;
  nrCell.term.length = otherLastLen;
  return answer;
}

@Override
public SpatialContext getContext() {
  return DUMMY_CTX;
}

/** Consistent with equals(): hash of the no-leaf token bytes. */
@Override
public int hashCode() {
  // trick to re-use bytesref; provided that we re-instate it
  int myLastLen = term.length;
  int result = getTokenBytesNoLeaf(term).hashCode();
  term.length = myLastLen;
  return result;
}

@Override
public String toString() {
  return NumberRangePrefixTree.this.toString(getShape());
}

/** Configure your IDE to use this. Shows raw level values alongside the pretty form. */
public String toStringDebug() {
  String pretty = toString();
  if (getLevel() == 0) return pretty;
  return toStringUnitRaw(this) + (isLeaf() ? "•" : "") + " " + pretty;
}
} // END OF NRCell
}
|
apache/druid | 36,249 | server/src/main/java/org/apache/druid/server/http/DataSourcesResource.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.server.http;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.inject.Inject;
import com.sun.jersey.spi.container.ResourceFilters;
import it.unimi.dsi.fastutil.objects.Object2LongMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.druid.audit.AuditEntry;
import org.apache.druid.audit.AuditManager;
import org.apache.druid.client.CoordinatorServerView;
import org.apache.druid.client.DataSourcesSnapshot;
import org.apache.druid.client.DruidDataSource;
import org.apache.druid.client.DruidServer;
import org.apache.druid.client.ImmutableDruidDataSource;
import org.apache.druid.client.ImmutableSegmentLoadInfo;
import org.apache.druid.client.SegmentLoadInfo;
import org.apache.druid.common.guava.FutureUtils;
import org.apache.druid.error.DruidException;
import org.apache.druid.guice.annotations.PublicApi;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.MapUtils;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.java.util.common.guava.FunctionalIterable;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.metadata.MetadataRuleManager;
import org.apache.druid.metadata.SegmentsMetadataManager;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.rpc.HttpResponseException;
import org.apache.druid.rpc.indexing.OverlordClient;
import org.apache.druid.rpc.indexing.SegmentUpdateResponse;
import org.apache.druid.server.coordination.DruidServerMetadata;
import org.apache.druid.server.coordinator.DruidCoordinator;
import org.apache.druid.server.coordinator.rules.LoadRule;
import org.apache.druid.server.coordinator.rules.Rule;
import org.apache.druid.server.http.security.DatasourceResourceFilter;
import org.apache.druid.server.security.AuthorizationUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.SegmentId;
import org.apache.druid.timeline.TimelineLookup;
import org.apache.druid.timeline.TimelineObjectHolder;
import org.apache.druid.timeline.VersionedIntervalTimeline;
import org.apache.druid.timeline.partition.PartitionChunk;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.function.Predicate;
import java.util.stream.Collectors;
/**
*
*/
@Path("/druid/coordinator/v1/datasources")
public class DataSourcesResource
{
  private static final Logger log = new Logger(DataSourcesResource.class);
  /**
   * Default offset of 14 days, in milliseconds. Used by {@link #getDatasourceLoadstatus}
   * as the lookback window when no interval query parameter is supplied.
   */
  private static final long DEFAULT_LOADSTATUS_INTERVAL_OFFSET = 14 * 24 * 60 * 60 * 1000;
  // View of which servers currently serve which segments.
  private final CoordinatorServerView serverInventoryView;
  // Metadata-store view of used/unused segments.
  private final SegmentsMetadataManager segmentsMetadataManager;
  // Source of load/drop rules per datasource.
  private final MetadataRuleManager metadataRuleManager;
  // Client for delegating segment state changes and kill tasks to the Overlord.
  private final OverlordClient overlordClient;
  private final AuthorizerMapper authorizerMapper;
  private final DruidCoordinator coordinator;
  private final AuditManager auditManager;
  /**
   * All dependencies are injected; this constructor only stores references.
   */
  @Inject
  public DataSourcesResource(
      CoordinatorServerView serverInventoryView,
      SegmentsMetadataManager segmentsMetadataManager,
      MetadataRuleManager metadataRuleManager,
      OverlordClient overlordClient,
      AuthorizerMapper authorizerMapper,
      DruidCoordinator coordinator,
      AuditManager auditManager
  )
  {
    this.serverInventoryView = serverInventoryView;
    this.segmentsMetadataManager = segmentsMetadataManager;
    this.metadataRuleManager = metadataRuleManager;
    this.overlordClient = overlordClient;
    this.authorizerMapper = authorizerMapper;
    this.coordinator = coordinator;
    this.auditManager = auditManager;
  }
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getQueryableDataSources(
@QueryParam("full") @Nullable String full,
@QueryParam("simple") @Nullable String simple,
@Context final HttpServletRequest req
)
{
Response.ResponseBuilder builder = Response.ok();
final Set<ImmutableDruidDataSource> datasources =
InventoryViewUtils.getSecuredDataSources(req, serverInventoryView, authorizerMapper);
final Object entity;
if (full != null) {
entity = datasources;
} else if (simple != null) {
entity = datasources.stream().map(this::makeSimpleDatasource).collect(Collectors.toList());
} else {
entity = datasources.stream().map(ImmutableDruidDataSource::getName).collect(Collectors.toList());
}
return builder.entity(entity).build();
}
@GET
@Path("/{dataSourceName}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getQueryableDataSource(
@PathParam("dataSourceName") final String dataSourceName,
@QueryParam("full") final String full
)
{
final ImmutableDruidDataSource dataSource = getQueryableDataSource(dataSourceName);
if (dataSource == null) {
return logAndCreateDataSourceNotFoundResponse(dataSourceName);
}
if (full != null) {
return Response.ok(dataSource).build();
}
return Response.ok(getSimpleDatasource(dataSourceName)).build();
}
  /** A deferred segment-state update call to the Overlord, invoked by {@link #updateSegmentsViaOverlord}. */
  private interface RemoteSegmentUpdateOperation
  {
    ListenableFuture<SegmentUpdateResponse> perform();
  }
/**
* @deprecated Use {@code OverlordDataSourcesResource#markAllNonOvershadowedSegmentsAsUsed} instead.
*/
@Deprecated
@POST
@Path("/{dataSourceName}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response markAsUsedAllNonOvershadowedSegments(@PathParam("dataSourceName") final String dataSourceName)
{
RemoteSegmentUpdateOperation remoteOperation = () -> overlordClient
.markNonOvershadowedSegmentsAsUsed(dataSourceName);
return updateSegmentsViaOverlord(dataSourceName, remoteOperation);
}
/**
* @deprecated Use {@code OverlordDataSourcesResource#markNonOvershadowedSegmentsAsUsed} instead.
*/
@Deprecated
@POST
@Path("/{dataSourceName}/markUsed")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response markAsUsedNonOvershadowedSegments(
@PathParam("dataSourceName") final String dataSourceName,
final SegmentsToUpdateFilter payload
)
{
if (payload == null || !payload.isValid()) {
return Response
.status(Response.Status.BAD_REQUEST)
.entity(SegmentsToUpdateFilter.INVALID_PAYLOAD_ERROR_MESSAGE)
.build();
} else {
RemoteSegmentUpdateOperation remoteOperation
= () -> overlordClient.markNonOvershadowedSegmentsAsUsed(dataSourceName, payload);
return updateSegmentsViaOverlord(dataSourceName, remoteOperation);
}
}
/**
* @deprecated Use {@code OverlordDataSourcesResource#markSegmentsAsUnused} instead.
*/
@Deprecated
@POST
@Path("/{dataSourceName}/markUnused")
@ResourceFilters(DatasourceResourceFilter.class)
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response markSegmentsAsUnused(
@PathParam("dataSourceName") final String dataSourceName,
final SegmentsToUpdateFilter payload,
@Context final HttpServletRequest req
)
{
if (payload == null || !payload.isValid()) {
return Response
.status(Response.Status.BAD_REQUEST)
.entity(SegmentsToUpdateFilter.INVALID_PAYLOAD_ERROR_MESSAGE)
.build();
} else {
RemoteSegmentUpdateOperation remoteOperation
= () -> overlordClient.markSegmentsAsUnused(dataSourceName, payload);
return updateSegmentsViaOverlord(dataSourceName, remoteOperation);
}
}
  /** Logs a warning and returns 204 No Content for a datasource absent from the inventory view. */
  private static Response logAndCreateDataSourceNotFoundResponse(String dataSourceName)
  {
    log.warn("datasource[%s] not found", dataSourceName);
    return Response.noContent().build();
  }
  /**
   * Executes a segment-state update against the Overlord and maps the outcome to an
   * HTTP response: 200 with the update response on success, a structured error for
   * {@link DruidException}, a NOT_FOUND error when the Overlord returns 404 (i.e. it
   * is on an older version lacking these APIs), and 500 otherwise.
   */
  private static Response updateSegmentsViaOverlord(
      String dataSourceName,
      RemoteSegmentUpdateOperation operation
  )
  {
    try {
      SegmentUpdateResponse response = FutureUtils.getUnchecked(operation.perform(), true);
      return Response.ok(response).build();
    }
    catch (DruidException e) {
      // DruidException carries its own persona/category; translate it directly.
      return ServletResourceUtils.buildErrorResponseFrom(e);
    }
    catch (Exception e) {
      final Throwable rootCause = Throwables.getRootCause(e);
      if (rootCause instanceof HttpResponseException) {
        HttpResponseStatus status = ((HttpResponseException) rootCause).getResponse().getStatus();
        if (status.getCode() == 404) {
          // A 404 from the Overlord means it predates these segment-update endpoints.
          final String errorMessage = "Could not update segments since Overlord is on an older version.";
          log.error(errorMessage);
          return ServletResourceUtils.buildErrorResponseFrom(
              DruidException.forPersona(DruidException.Persona.OPERATOR)
                            .ofCategory(DruidException.Category.NOT_FOUND)
                            .build(errorMessage)
          );
        }
      }
      log.error(e, "Error occurred while updating segments for datasource[%s].", dataSourceName);
      return Response
          .serverError()
          .entity(Map.of("error", "Unknown server error", "message", rootCause.toString()))
          .build();
    }
  }
/**
* @deprecated Use {@code OverlordDataSourcesResource#markAllSegmentsAsUnused}
* or {@link #killUnusedSegmentsInInterval} instead.
*/
@DELETE
@Deprecated
@Path("/{dataSourceName}")
@ResourceFilters(DatasourceResourceFilter.class)
@Produces(MediaType.APPLICATION_JSON)
public Response markAsUnusedAllSegmentsOrKillUnusedSegmentsInInterval(
@PathParam("dataSourceName") final String dataSourceName,
@QueryParam("kill") final String kill,
@QueryParam("interval") final String interval,
@Context HttpServletRequest req
)
{
if (Boolean.parseBoolean(kill)) {
return killUnusedSegmentsInInterval(dataSourceName, interval, req);
} else {
RemoteSegmentUpdateOperation remoteOperation
= () -> overlordClient.markSegmentsAsUnused(dataSourceName);
return updateSegmentsViaOverlord(dataSourceName, remoteOperation);
}
}
  /**
   * Launches an Overlord kill task for unused segments of the datasource within the
   * given interval, and records an audit entry with the kill task id. Returns 200 on
   * success or 500 if the task could not be submitted (e.g. no indexing service).
   */
  @DELETE
  @Path("/{dataSourceName}/intervals/{interval}")
  @ResourceFilters(DatasourceResourceFilter.class)
  @Produces(MediaType.APPLICATION_JSON)
  public Response killUnusedSegmentsInInterval(
      @PathParam("dataSourceName") final String dataSourceName,
      @PathParam("interval") final String interval,
      @Context final HttpServletRequest req
  )
  {
    // Intervals in path segments are usually written with '_' instead of '/'; both accepted.
    if (StringUtils.contains(interval, '_')) {
      log.warn("Use interval with '/', not '_': [%s] given", interval);
    }
    final Interval theInterval = Intervals.of(interval.replace('_', '/'));
    try {
      final String killTaskId = FutureUtils.getUnchecked(
          overlordClient.runKillTask("api-issued", dataSourceName, theInterval, null, null, null),
          true
      );
      // Audit the kill so the operation is traceable to the requesting user.
      auditManager.doAudit(
          AuditEntry.builder()
                    .key(dataSourceName)
                    .type("segment.kill")
                    .payload(ImmutableMap.of("killTaskId", killTaskId, "interval", theInterval))
                    .auditInfo(AuthorizationUtils.buildAuditInfo(req))
                    .request(AuthorizationUtils.buildRequestInfo("coordinator", req))
                    .build()
      );
      return Response.ok().build();
    }
    catch (Exception e) {
      return Response
          .serverError()
          .entity(
              ImmutableMap.of(
                  "error", "Exception occurred. Are you sure you have an indexing service?",
                  "message", e.toString()
              )
          )
          .build();
    }
  }
@GET
@Path("/{dataSourceName}/intervals")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getIntervalsWithServedSegmentsOrAllServedSegmentsPerIntervals(
@PathParam("dataSourceName") String dataSourceName,
@QueryParam("simple") String simple,
@QueryParam("full") String full
)
{
if (simple == null && full == null) {
final ImmutableDruidDataSource dataSource = getQueryableDataSource(dataSourceName);
if (dataSource == null) {
return logAndCreateDataSourceNotFoundResponse(dataSourceName);
}
final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();
Set<Interval> intervals = new TreeSet<>(comparator);
dataSource.getSegments().forEach(segment -> intervals.add(segment.getInterval()));
return Response.ok(intervals).build();
} else {
return getServedSegmentsInInterval(dataSourceName, full != null, interval -> true);
}
}
@GET
@Path("/{dataSourceName}/intervals/{interval}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getServedSegmentsInInterval(
@PathParam("dataSourceName") String dataSourceName,
@PathParam("interval") String interval,
@QueryParam("simple") String simple,
@QueryParam("full") String full
)
{
final Interval theInterval = Intervals.of(interval.replace('_', '/'));
if (simple == null && full == null) {
final ImmutableDruidDataSource dataSource = getQueryableDataSource(dataSourceName);
if (dataSource == null) {
return logAndCreateDataSourceNotFoundResponse(dataSourceName);
}
final Set<SegmentId> segmentIds = new TreeSet<>();
for (DataSegment dataSegment : dataSource.getSegments()) {
if (theInterval.contains(dataSegment.getInterval())) {
segmentIds.add(dataSegment.getId());
}
}
return Response.ok(segmentIds).build();
}
return getServedSegmentsInInterval(dataSourceName, full != null, theInterval::contains);
}
  /**
   * Reports load status of used, non-overshadowed segments of a datasource within an
   * interval (default: the trailing 14 days). Modes: {@code simple} returns the count
   * of unavailable segments; {@code full} returns per-tier under-replication counts;
   * default returns the percentage of segments loaded.
   *
   * <p>{@code forceMetadataRefresh} is mandatory: when true the metadata store is
   * polled synchronously (expensive), otherwise a recent cached snapshot is used.
   */
  @GET
  @Path("/{dataSourceName}/loadstatus")
  @Produces(MediaType.APPLICATION_JSON)
  @ResourceFilters(DatasourceResourceFilter.class)
  public Response getDatasourceLoadstatus(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("forceMetadataRefresh") final Boolean forceMetadataRefresh,
      @QueryParam("interval") @Nullable final String interval,
      @QueryParam("simple") @Nullable final String simple,
      @QueryParam("full") @Nullable final String full,
      @QueryParam("computeUsingClusterView") @Nullable String computeUsingClusterView
  )
  {
    if (forceMetadataRefresh == null) {
      return Response
          .status(Response.Status.BAD_REQUEST)
          .entity("Invalid request. forceMetadataRefresh must be specified")
          .build();
    }
    final Interval theInterval;
    if (interval == null) {
      // Default to the trailing DEFAULT_LOADSTATUS_INTERVAL_OFFSET (14 days) window.
      long currentTimeInMs = System.currentTimeMillis();
      theInterval = Intervals.utc(currentTimeInMs - DEFAULT_LOADSTATUS_INTERVAL_OFFSET, currentTimeInMs);
    } else {
      theInterval = Intervals.of(interval.replace('_', '/'));
    }
    final DataSourcesSnapshot snapshot;
    if (forceMetadataRefresh) {
      snapshot = segmentsMetadataManager.forceUpdateDataSourcesSnapshot();
    } else {
      snapshot = segmentsMetadataManager.getRecentDataSourcesSnapshot();
    }
    final ImmutableDruidDataSource immutableDruidDataSource = snapshot.getDataSource(dataSourceName);
    if (immutableDruidDataSource == null) {
      return logAndCreateDataSourceNotFoundResponse(dataSourceName);
    }
    final Set<DataSegment> segments = snapshot.getAllUsedNonOvershadowedSegments(dataSourceName, theInterval);
    if (segments.isEmpty()) {
      return Response
          .status(Response.Status.NO_CONTENT)
          .entity("No used segment found for the given datasource and interval")
          .build();
    }
    if (simple != null) {
      // Calculate response for simple mode
      SegmentsLoadStatistics segmentsLoadStatistics = computeSegmentLoadStatistics(segments);
      return Response.ok(
          ImmutableMap.of(
              dataSourceName,
              segmentsLoadStatistics.getNumUnavailableSegments()
          )
      ).build();
    } else if (full != null) {
      // Calculate response for full mode
      Map<String, Object2LongMap<String>> segmentLoadMap =
          coordinator.getTierToDatasourceToUnderReplicatedCount(segments, computeUsingClusterView != null);
      if (segmentLoadMap.isEmpty()) {
        return Response.serverError()
                       .entity("Coordinator segment replicant lookup is not initialized yet. Try again later.")
                       .build();
      }
      return Response.ok(segmentLoadMap).build();
    } else {
      // Calculate response for default mode: percent of published segments loaded.
      // segments is non-empty here, so the divisor is non-zero.
      SegmentsLoadStatistics segmentsLoadStatistics = computeSegmentLoadStatistics(segments);
      return Response.ok(
          ImmutableMap.of(
              dataSourceName,
              100 * ((double) (segmentsLoadStatistics.getNumLoadedSegments())
                     / (double) segmentsLoadStatistics.getNumPublishedSegments())
          )
      ).build();
    }
  }
private SegmentsLoadStatistics computeSegmentLoadStatistics(Iterable<DataSegment> segments)
{
Map<SegmentId, SegmentLoadInfo> segmentLoadInfos = serverInventoryView.getLoadInfoForAllSegments();
int numPublishedSegments = 0;
int numUnavailableSegments = 0;
int numLoadedSegments = 0;
for (DataSegment segment : segments) {
numPublishedSegments++;
if (!segmentLoadInfos.containsKey(segment.getId())) {
numUnavailableSegments++;
} else {
numLoadedSegments++;
}
}
return new SegmentsLoadStatistics(numPublishedSegments, numUnavailableSegments, numLoadedSegments);
}
  /** Immutable triple of segment counts computed by {@link #computeSegmentLoadStatistics}. */
  private static class SegmentsLoadStatistics
  {
    // Total segments considered.
    private final int numPublishedSegments;
    // Segments with no load info in the inventory view.
    private final int numUnavailableSegments;
    // Segments loaded on at least one server.
    private final int numLoadedSegments;
    SegmentsLoadStatistics(
        int numPublishedSegments,
        int numUnavailableSegments,
        int numLoadedSegments
    )
    {
      this.numPublishedSegments = numPublishedSegments;
      this.numUnavailableSegments = numUnavailableSegments;
      this.numLoadedSegments = numLoadedSegments;
    }
    public int getNumPublishedSegments()
    {
      return numPublishedSegments;
    }
    public int getNumUnavailableSegments()
    {
      return numUnavailableSegments;
    }
    public int getNumLoadedSegments()
    {
      return numLoadedSegments;
    }
  }
  /**
   * The property names belong to the public HTTP JSON API.
   * Keys of the per-interval stats maps returned in "simple" mode.
   */
  @PublicApi
  enum SimpleProperties
  {
    size,
    count
  }
  /**
   * Builds the per-interval response for served segments matching {@code intervalFilter},
   * most recent intervals first. In full mode each interval maps segment id to its
   * metadata and serving hosts; otherwise each interval maps to aggregate size/count.
   */
  private Response getServedSegmentsInInterval(
      String dataSourceName,
      boolean full,
      Predicate<Interval> intervalFilter
  )
  {
    final ImmutableDruidDataSource dataSource = getQueryableDataSource(dataSourceName);
    if (dataSource == null) {
      return logAndCreateDataSourceNotFoundResponse(dataSourceName);
    }
    final Comparator<Interval> comparator = Comparators.intervalsByStartThenEnd().reversed();
    if (full) {
      final Map<Interval, Map<SegmentId, Object>> retVal = new TreeMap<>(comparator);
      for (DataSegment dataSegment : dataSource.getSegments()) {
        if (intervalFilter.test(dataSegment.getInterval())) {
          Map<SegmentId, Object> segments = retVal.computeIfAbsent(dataSegment.getInterval(), i -> new HashMap<>());
          // Segments not currently served by any server are silently omitted.
          Pair<DataSegment, Set<String>> segmentAndServers = getServersWhereSegmentIsServed(dataSegment.getId());
          if (segmentAndServers != null) {
            segments.put(
                dataSegment.getId(),
                ImmutableMap.of("metadata", segmentAndServers.lhs, "servers", segmentAndServers.rhs)
            );
          }
        }
      }
      return Response.ok(retVal).build();
    } else {
      final Map<Interval, Map<SimpleProperties, Object>> statsPerInterval = new TreeMap<>(comparator);
      for (DataSegment dataSegment : dataSource.getSegments()) {
        if (intervalFilter.test(dataSegment.getInterval())) {
          Map<SimpleProperties, Object> properties =
              statsPerInterval.computeIfAbsent(dataSegment.getInterval(), i -> new EnumMap<>(SimpleProperties.class));
          // Values are boxed Long/Integer; merge accumulates size and count per interval.
          properties.merge(SimpleProperties.size, dataSegment.getSize(), (a, b) -> (Long) a + (Long) b);
          properties.merge(SimpleProperties.count, 1, (a, b) -> (Integer) a + (Integer) b);
        }
      }
      return Response.ok(statsPerInterval).build();
    }
  }
@GET
@Path("/{dataSourceName}/segments")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getAllServedSegments(
@PathParam("dataSourceName") String dataSourceName,
@QueryParam("full") String full
)
{
ImmutableDruidDataSource dataSource = getQueryableDataSource(dataSourceName);
if (dataSource == null) {
return logAndCreateDataSourceNotFoundResponse(dataSourceName);
}
Response.ResponseBuilder builder = Response.ok();
if (full != null) {
return builder.entity(dataSource.getSegments()).build();
}
return builder.entity(Iterables.transform(dataSource.getSegments(), DataSegment::getId)).build();
}
@GET
@Path("/{dataSourceName}/segments/{segmentId}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getServedSegment(
@PathParam("dataSourceName") String dataSourceName,
@PathParam("segmentId") String segmentId
)
{
ImmutableDruidDataSource dataSource = getQueryableDataSource(dataSourceName);
if (dataSource == null) {
return logAndCreateDataSourceNotFoundResponse(dataSourceName);
}
for (SegmentId possibleSegmentId : SegmentId.iteratePossibleParsingsWithDataSource(dataSourceName, segmentId)) {
Pair<DataSegment, Set<String>> retVal = getServersWhereSegmentIsServed(possibleSegmentId);
if (retVal != null) {
return Response.ok(ImmutableMap.of("metadata", retVal.lhs, "servers", retVal.rhs)).build();
}
}
log.warn("Segment id [%s] is unknown", segmentId);
return Response.noContent().build();
}
/**
* @deprecated Use {@code OverlordDataSourcesResource#markSegmentAsUnused} instead.
*/
@Deprecated
@DELETE
@Path("/{dataSourceName}/segments/{segmentId}")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response markSegmentAsUnused(
@PathParam("dataSourceName") String dataSourceName,
@PathParam("segmentId") String segmentIdString
)
{
final SegmentId segmentId = SegmentId.tryParse(dataSourceName, segmentIdString);
if (segmentId == null) {
return Response.status(Response.Status.BAD_REQUEST).entity(
org.apache.druid.java.util.common.StringUtils.format(
"Could not parse Segment ID[%s] for DataSource[%s]",
segmentIdString, dataSourceName
)
).build();
}
RemoteSegmentUpdateOperation remoteOperation
= () -> overlordClient.markSegmentAsUnused(segmentId);
return updateSegmentsViaOverlord(dataSourceName, remoteOperation);
}
/**
* @deprecated Use {@code OverlordDataSourcesResource#markSegmentAsUsed} instead.
*/
@Deprecated
@POST
@Path("/{dataSourceName}/segments/{segmentId}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response markSegmentAsUsed(
@PathParam("dataSourceName") String dataSourceName,
@PathParam("segmentId") String segmentIdString
)
{
final SegmentId segmentId = SegmentId.tryParse(dataSourceName, segmentIdString);
if (segmentId == null) {
return Response.status(Response.Status.BAD_REQUEST).entity(
org.apache.druid.java.util.common.StringUtils.format(
"Could not parse Segment ID[%s] for DataSource[%s]",
segmentIdString, dataSourceName
)
).build();
}
RemoteSegmentUpdateOperation remoteOperation
= () -> overlordClient.markSegmentAsUsed(segmentId);
return updateSegmentsViaOverlord(dataSourceName, remoteOperation);
}
@GET
@Path("/{dataSourceName}/tiers")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getTiersWhereSegmentsAreServed(@PathParam("dataSourceName") String dataSourceName)
{
Set<String> retVal = new HashSet<>();
for (DruidServer druidServer : serverInventoryView.getInventory()) {
if (druidServer.getDataSource(dataSourceName) != null) {
retVal.add(druidServer.getTier());
}
}
return Response.ok(retVal).build();
}
@Nullable
private ImmutableDruidDataSource getQueryableDataSource(final String dataSourceName)
{
List<DruidDataSource> dataSources = serverInventoryView
.getInventory()
.stream()
.map(server -> server.getDataSource(dataSourceName))
.filter(Objects::nonNull)
.collect(Collectors.toList());
if (dataSources.isEmpty()) {
return null;
}
// Note: this logic doesn't guarantee that the result is a snapshot that ever existed in the cluster because all
// DruidDataSource objects (belonging to different servers) are independently, concurrently mutable objects.
// But this is OK because a "snapshot" hardly even makes sense in a distributed system anyway.
final SortedMap<SegmentId, DataSegment> segmentMap = new TreeMap<>();
for (DruidDataSource dataSource : dataSources) {
Iterable<DataSegment> segments = dataSource.getSegments();
for (DataSegment segment : segments) {
segmentMap.put(segment.getId(), segment);
}
}
return new ImmutableDruidDataSource(dataSourceName, Collections.emptyMap(), segmentMap);
}
@Nullable
private Pair<DataSegment, Set<String>> getServersWhereSegmentIsServed(SegmentId segmentId)
{
DataSegment theSegment = null;
Set<String> servers = new HashSet<>();
for (DruidServer druidServer : serverInventoryView.getInventory()) {
DataSegment currSegment = druidServer.getSegment(segmentId);
if (currSegment != null) {
theSegment = currSegment;
servers.add(druidServer.getHost());
}
}
if (theSegment == null) {
return null;
}
return new Pair<>(theSegment, servers);
}
private Map<String, Object> makeSimpleDatasource(ImmutableDruidDataSource input)
{
return new ImmutableMap.Builder<String, Object>()
.put("name", input.getName())
.put("properties", getSimpleDatasource(input.getName()))
.build();
}
private Map<String, Map<String, Object>> getSimpleDatasource(String dataSourceName)
{
Map<String, Object> tiers = new HashMap<>();
Map<String, Object> segments = new HashMap<>();
Map<String, Map<String, Object>> retVal = ImmutableMap.of(
"tiers", tiers,
"segments", segments
);
Set<SegmentId> totalDistinctSegments = new HashSet<>();
Map<String, HashSet<Object>> tierDistinctSegments = new HashMap<>();
long totalSegmentSize = 0;
long totalReplicatedSize = 0;
DateTime minTime = DateTimes.MAX;
DateTime maxTime = DateTimes.MIN;
String tier;
for (DruidServer druidServer : serverInventoryView.getInventory()) {
DruidDataSource druidDataSource = druidServer.getDataSource(dataSourceName);
tier = druidServer.getTier();
if (druidDataSource == null) {
continue;
}
tierDistinctSegments.computeIfAbsent(tier, t -> new HashSet<>());
long dataSourceSegmentSize = 0;
long replicatedSegmentSize = 0;
for (DataSegment dataSegment : druidDataSource.getSegments()) {
// tier segments stats
if (!tierDistinctSegments.get(tier).contains(dataSegment.getId())) {
dataSourceSegmentSize += dataSegment.getSize();
tierDistinctSegments.get(tier).add(dataSegment.getId());
}
// total segments stats
if (totalDistinctSegments.add(dataSegment.getId())) {
totalSegmentSize += dataSegment.getSize();
minTime = DateTimes.min(minTime, dataSegment.getInterval().getStart());
maxTime = DateTimes.max(maxTime, dataSegment.getInterval().getEnd());
}
totalReplicatedSize += dataSegment.getSize();
replicatedSegmentSize += dataSegment.getSize();
}
// tier stats
Map<String, Object> tierStats = (Map) tiers.get(tier);
if (tierStats == null) {
tierStats = new HashMap<>();
tiers.put(druidServer.getTier(), tierStats);
}
tierStats.put("segmentCount", tierDistinctSegments.get(tier).size());
long segmentSize = MapUtils.getLong(tierStats, "size", 0L);
tierStats.put("size", segmentSize + dataSourceSegmentSize);
long replicatedSize = MapUtils.getLong(tierStats, "replicatedSize", 0L);
tierStats.put("replicatedSize", replicatedSize + replicatedSegmentSize);
}
segments.put("count", totalDistinctSegments.size());
segments.put("size", totalSegmentSize);
segments.put("replicatedSize", totalReplicatedSize);
segments.put("minTime", minTime);
segments.put("maxTime", maxTime);
return retVal;
}
/**
* Provides serverView for a datasource and Interval which gives details about servers hosting segments for an
* interval. Used by the realtime tasks to fetch a view of the interval they are interested in.
*/
@GET
@Path("/{dataSourceName}/intervals/{interval}/serverview")
@Produces(MediaType.APPLICATION_JSON)
@ResourceFilters(DatasourceResourceFilter.class)
public Response getServedSegmentsInInterval(
@PathParam("dataSourceName") String dataSourceName,
@PathParam("interval") String interval,
@QueryParam("partial") final boolean partial
)
{
TimelineLookup<String, SegmentLoadInfo> timeline = serverInventoryView.getTimeline(
new TableDataSource(dataSourceName)
);
final Interval theInterval = Intervals.of(interval.replace('_', '/'));
if (timeline == null) {
log.debug("No timeline found for datasource[%s]", dataSourceName);
return Response.ok(new ArrayList<ImmutableSegmentLoadInfo>()).build();
}
return Response.ok(prepareServedSegmentsInInterval(timeline, theInterval)).build();
}
private Iterable<ImmutableSegmentLoadInfo> prepareServedSegmentsInInterval(
TimelineLookup<String, SegmentLoadInfo> dataSourceServingTimeline,
Interval interval
)
{
Iterable<TimelineObjectHolder<String, SegmentLoadInfo>> lookup =
dataSourceServingTimeline.lookupWithIncompletePartitions(interval);
return FunctionalIterable
.create(lookup)
.transformCat(
(TimelineObjectHolder<String, SegmentLoadInfo> input) ->
Iterables.transform(
input.getObject(),
(PartitionChunk<SegmentLoadInfo> chunk) -> chunk.getObject().toImmutableSegmentLoadInfo()
)
);
}
  /**
   * Used by the realtime tasks to learn whether a segment is handed off or not.
   * It returns true when the segment will never be handed off or is already handed off. Otherwise, it returns false.
   */
  @GET
  @Path("/{dataSourceName}/handoffComplete")
  @Produces(MediaType.APPLICATION_JSON)
  @ResourceFilters(DatasourceResourceFilter.class)
  public Response isHandOffComplete(
      @PathParam("dataSourceName") String dataSourceName,
      @QueryParam("interval") final String interval,
      @QueryParam("partitionNumber") final int partitionNumber,
      @QueryParam("version") final String version
  )
  {
    try {
      final List<Rule> rules = metadataRuleManager.getRulesWithDefault(dataSourceName);
      final Interval theInterval = Intervals.of(interval);
      final SegmentDescriptor descriptor = new SegmentDescriptor(theInterval, version, partitionNumber);
      final DateTime now = DateTimes.nowUtc();
      // A segment that is not eligible for load will never be handed off
      boolean eligibleForLoad = false;
      // Only the first matching rule applies, hence the break.
      for (Rule rule : rules) {
        if (rule.appliesTo(theInterval, now)) {
          eligibleForLoad = rule instanceof LoadRule && ((LoadRule) rule).shouldMatchingSegmentBeLoaded();
          break;
        }
      }
      if (!eligibleForLoad) {
        return Response.ok(true).build();
      }
      VersionedIntervalTimeline<String, SegmentLoadInfo> timeline = serverInventoryView.getTimeline(
          new TableDataSource(dataSourceName)
      );
      if (timeline == null) {
        log.error("No timeline found for datasource[%s]", dataSourceName);
        return Response.ok(false).build();
      }
      // A segment with version lower than that of the latest chunk might never get handed off
      // If there are multiple versions of this segment (due to a concurrent replace task),
      // only the latest version would get handed off
      List<TimelineObjectHolder<String, SegmentLoadInfo>> timelineObjects = timeline.lookup(Intervals.of(interval));
      if (!timelineObjects.isEmpty() && timelineObjects.get(0).getVersion().compareTo(version) > 0) {
        return Response.ok(true).build();
      }
      Iterable<ImmutableSegmentLoadInfo> servedSegmentsInInterval =
          prepareServedSegmentsInInterval(timeline, theInterval);
      if (isSegmentLoaded(servedSegmentsInInterval, descriptor)) {
        return Response.ok(true).build();
      }
      return Response.ok(false).build();
    }
    catch (Exception e) {
      log.error(e, "Error while handling hand off check request");
      return Response.serverError().entity(ImmutableMap.of("error", e.toString())).build();
    }
  }
static boolean isSegmentLoaded(Iterable<ImmutableSegmentLoadInfo> servedSegments, SegmentDescriptor descriptor)
{
for (ImmutableSegmentLoadInfo segmentLoadInfo : servedSegments) {
if (segmentLoadInfo.getSegment().getInterval().contains(descriptor.getInterval())
&& segmentLoadInfo.getSegment().getShardSpec().getPartitionNum() == descriptor.getPartitionNumber()
&& segmentLoadInfo.getSegment().getVersion().compareTo(descriptor.getVersion()) >= 0
&& Iterables.any(
segmentLoadInfo.getServers(), DruidServerMetadata::isSegmentReplicationTarget
)) {
return true;
}
}
return false;
}
}
|
apache/seatunnel | 36,379 | seatunnel-e2e/seatunnel-connector-v2-e2e/connector-cdc-opengauss-e2e/src/test/java/org/apache/seatunnel/connectors/seatunnel/cdc/postgres/OpengaussCDCIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seatunnel.connectors.seatunnel.cdc.postgres;
import org.apache.seatunnel.shade.com.google.common.collect.Lists;
import org.apache.seatunnel.e2e.common.TestResource;
import org.apache.seatunnel.e2e.common.TestSuiteBase;
import org.apache.seatunnel.e2e.common.container.ContainerExtendedFactory;
import org.apache.seatunnel.e2e.common.container.EngineType;
import org.apache.seatunnel.e2e.common.container.TestContainer;
import org.apache.seatunnel.e2e.common.junit.DisabledOnContainer;
import org.apache.seatunnel.e2e.common.junit.TestContainerExtension;
import org.apache.seatunnel.e2e.common.util.JobIdGenerator;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.TestTemplate;
import org.testcontainers.containers.Container;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.lifecycle.Startables;
import org.testcontainers.utility.DockerImageName;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.awaitility.Awaitility.await;
import static org.awaitility.Awaitility.given;
import static org.junit.Assert.assertNotNull;
@Slf4j
@DisabledOnContainer(
value = {},
type = {EngineType.SPARK},
disabledReason = "Currently SPARK do not support cdc")
public class OpengaussCDCIT extends TestSuiteBase implements TestResource {
    // Port the opengauss server listens on; startUp() binds it 1:1 on the host.
    private static final int OPENGAUSS_PORT = 5432;
    // Captures the statement part (group 1) of a DDL line that ends in a "--" comment.
    private static final Pattern COMMENT_PATTERN = Pattern.compile("^(.*)--.*$");
    private static final String USERNAME = "gaussdb";
    private static final String PASSWORD = "openGauss@123";
    // Database created by initializeOpengaussSql(); all test tables live in its inventory schema.
    private static final String OPENGAUSSQL_DATABASE = "opengauss_cdc";
    private static final String OPENGAUSSQL_DEFAULT_DATABASE = "postgres";
    private static final String OPENGAUSS_SCHEMA = "inventory";
    private static final String SOURCE_TABLE_1 = "opengauss_cdc_table_1";
    private static final String SOURCE_TABLE_2 = "opengauss_cdc_table_2";
    private static final String SOURCE_TABLE_3 = "opengauss_cdc_table_3";
    private static final String SINK_TABLE_1 = "sink_opengauss_cdc_table_1";
    private static final String SINK_TABLE_2 = "sink_opengauss_cdc_table_2";
    private static final String SINK_TABLE_3 = "sink_opengauss_cdc_table_3";
    private static final String SOURCE_TABLE_NO_PRIMARY_KEY = "full_types_no_primary_key";
    // Network alias used by job configs to reach the database from other containers.
    private static final String OPENGAUSS_HOST = "opengauss_cdc_e2e";
    // Opengauss speaks the postgres wire protocol, so testcontainers may treat it as a substitute.
    protected static final DockerImageName OPENGAUSS_IMAGE =
            DockerImageName.parse("opengauss/opengauss:5.0.0")
                    .asCompatibleSubstituteFor("postgres");
    private static final String SOURCE_SQL_TEMPLATE = "select * from %s.%s order by id";
    public static final GenericContainer<?> OPENGAUSS_CONTAINER =
            new GenericContainer<>(OPENGAUSS_IMAGE)
                    .withNetwork(NETWORK)
                    .withNetworkAliases(OPENGAUSS_HOST)
                    .withEnv("GS_PASSWORD", PASSWORD)
                    .withLogConsumer(new Slf4jLogConsumer(log));
    // Postgres JDBC driver downloaded into the engine container for the JDBC sink
    // (opengauss is wire-compatible with postgres).
    private String driverUrl() {
        return "https://repo1.maven.org/maven2/org/postgresql/postgresql/42.5.1/postgresql-42.5.1.jar";
    }
    // Pre-installs the postgres JDBC driver into each engine container's plugin dir so the
    // JDBC sink used by the test jobs can load it.
    @TestContainerExtension
    protected final ContainerExtendedFactory extendedFactory =
            container -> {
                Container.ExecResult extraCommands =
                        container.execInContainer(
                                "bash",
                                "-c",
                                "mkdir -p /tmp/seatunnel/plugins/JDBC/lib && cd /tmp/seatunnel/plugins/JDBC/lib && wget "
                                        + driverUrl());
                Assertions.assertEquals(0, extraCommands.getExitCode(), extraCommands.getStderr());
            };
@BeforeAll
@Override
public void startUp() throws Exception {
log.info("The second stage: Starting opengauss containers...");
OPENGAUSS_CONTAINER.setPortBindings(
Lists.newArrayList(String.format("%s:%s", OPENGAUSS_PORT, OPENGAUSS_PORT)));
Startables.deepStart(Stream.of(OPENGAUSS_CONTAINER)).join();
log.info("Opengauss Containers are started");
given().ignoreExceptions()
.await()
.atLeast(100, TimeUnit.MILLISECONDS)
.pollInterval(2, TimeUnit.SECONDS)
.atMost(2, TimeUnit.MINUTES)
.untilAsserted(this::initializeOpengaussSql);
String[] command1 = {
"/bin/sh",
"-c",
"sed -i 's/^#password_encryption_type = 2/password_encryption_type = 1/' /var/lib/opengauss/data/postgresql.conf"
};
Container.ExecResult result1 = OPENGAUSS_CONTAINER.execInContainer(command1);
Assertions.assertEquals(0, result1.getExitCode());
String[] command2 = {
"/bin/sh",
"-c",
"sed -i 's/host replication gaussdb 0.0.0.0\\/0 md5/host replication gaussdb 0.0.0.0\\/0 sha256/' /var/lib/opengauss/data/pg_hba.conf"
};
Container.ExecResult result2 = OPENGAUSS_CONTAINER.execInContainer(command2);
Assertions.assertEquals(0, result2.getExitCode());
String[] command3 = {
"/bin/sh",
"-c",
"echo \"host all dailai 0.0.0.0/0 md5\" >> /var/lib/opengauss/data/pg_hba.conf"
};
Container.ExecResult result3 = OPENGAUSS_CONTAINER.execInContainer(command3);
Assertions.assertEquals(0, result3.getExitCode());
reloadConf();
createNewUserForJdbcSink();
}
    /**
     * Single-table end-to-end check: submits the CDC job asynchronously, verifies the snapshot
     * phase copied SOURCE_TABLE_1 into SINK_TABLE_1, then applies insert/update/delete on the
     * source and verifies the streaming phase replays them into the sink.
     */
    @TestTemplate
    public void testOpengaussCdcCheckDataE2e(TestContainer container) {
        try {
            // The job blocks until cancelled, so run it off-thread while we assert on the sink.
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            container.executeJob("/opengausscdc_to_opengauss.conf");
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                        return null;
                    });
            // snapshot stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertIterableEquals(
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SOURCE_TABLE_1)),
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SINK_TABLE_1)));
                            });
            // insert update delete
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertIterableEquals(
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SOURCE_TABLE_1)),
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SINK_TABLE_1)));
                            });
        } finally {
            // Clear related content to ensure that multiple operations are not affected
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_1);
        }
    }
    /**
     * Verifies a CDC job with a metadata transform stays RUNNING after DML is applied, then
     * cancels it. Zeta-only: relies on job-status and manual-cancel APIs.
     * NOTE(review): method name keeps its historical "Meatadata" spelling — renaming would change
     * the public test identifier.
     */
    @TestTemplate
    @DisabledOnContainer(
            value = {},
            type = {EngineType.SPARK, EngineType.FLINK},
            disabledReason =
                    "This case requires obtaining the task health status and manually canceling the canceled task, which is currently only supported by the zeta engine.")
    public void testOpengaussCdcMeatadataTrans(TestContainer container)
            throws InterruptedException, IOException {
        try {
            Long jobId = JobIdGenerator.newJobId();
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            container.executeJob(
                                    "/opengausscdc_to_meatadata_trans.conf", String.valueOf(jobId));
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                        return null;
                    });
            // Give the job time to start, apply DML, then let the stream phase process it.
            TimeUnit.SECONDS.sleep(10);
            // insert update delete
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            TimeUnit.SECONDS.sleep(20);
            Awaitility.await()
                    .atMost(2, TimeUnit.MINUTES)
                    .untilAsserted(
                            () -> {
                                String jobStatus = container.getJobStatus(String.valueOf(jobId));
                                Assertions.assertEquals("RUNNING", jobStatus);
                            });
            Container.ExecResult cancelJobResult = container.cancelJob(String.valueOf(jobId));
            Assertions.assertEquals(0, cancelJobResult.getExitCode(), cancelJobResult.getStderr());
        } finally {
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_1);
        }
    }
    /**
     * Two-table end-to-end check: runs a multi-table CDC job and verifies both source tables are
     * mirrored into their sinks through the snapshot phase, then again after insert/update/delete
     * on each source (stream phase).
     */
    @TestTemplate
    @DisabledOnContainer(
            value = {},
            type = {EngineType.SPARK},
            disabledReason = "Currently SPARK do not support cdc")
    public void testOpengaussCdcMultiTableE2e(TestContainer container) {
        try {
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            container.executeJob(
                                    "/opengausscdc_to_opengauss_with_multi_table_mode_two_table.conf");
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                        return null;
                    });
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertAll(
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_1)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_1))),
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_2)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_2)))));
            // insert update delete
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_2);
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertAll(
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_1)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_1))),
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_2)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_2)))));
        } finally {
            // Clear related content to ensure that multiple operations are not affected
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_1);
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_2);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_2);
        }
    }
    /**
     * Savepoint/restore check: runs a one-table CDC job, takes a savepoint, then restores the job
     * with a second table added and verifies both tables are mirrored afterwards. Zeta-only.
     */
    @TestTemplate
    @DisabledOnContainer(
            value = {},
            type = {EngineType.SPARK, EngineType.FLINK},
            disabledReason = "Currently SPARK and FLINK do not support restore")
    public void testMultiTableWithRestore(TestContainer container)
            throws IOException, InterruptedException {
        Long jobId = JobIdGenerator.newJobId();
        try {
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            return container.executeJob(
                                    "/opengausscdc_to_opengauss_with_multi_table_mode_one_table.conf",
                                    String.valueOf(jobId));
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                    });
            // insert update delete
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertAll(
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_1)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_1)))));
            // Savepoint must succeed before the job can be restored with a new config.
            Assertions.assertEquals(0, container.savepointJob(String.valueOf(jobId)).getExitCode());
            // Restore job with add a new table
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            container.restoreJob(
                                    "/opengausscdc_to_opengauss_with_multi_table_mode_two_table.conf",
                                    String.valueOf(jobId));
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                        return null;
                    });
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_2);
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertAll(
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_1)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_1))),
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_2)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_2)))));
            log.info("****************** container logs start ******************");
            String containerLogs = container.getServerLogs();
            log.info(containerLogs);
            // pg cdc logs contain ERROR
            // Assertions.assertFalse(containerLogs.contains("ERROR"));
            log.info("****************** container logs end ******************");
        } finally {
            // Clear related content to ensure that multiple operations are not affected
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_1);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_1);
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_2);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_2);
        }
    }
    /**
     * Schema-evolution check: runs a CDC job on table 3, takes a savepoint, adds an f_big column
     * to both source and sink, inserts a row with the new shape, restores the job, and verifies
     * source and sink still match. Zeta-only.
     */
    @TestTemplate
    @DisabledOnContainer(
            value = {},
            type = {EngineType.SPARK, EngineType.FLINK},
            disabledReason = "Currently SPARK and FLINK do not support restore")
    public void testAddFieldWithRestore(TestContainer container)
            throws IOException, InterruptedException {
        Long jobId = JobIdGenerator.newJobId();
        try {
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            return container.executeJob(
                                    "/opengausscdc_to_opengauss_test_add_Filed.conf",
                                    String.valueOf(jobId));
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                    });
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertAll(
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_3)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_3)))));
            Assertions.assertEquals(0, container.savepointJob(String.valueOf(jobId)).getExitCode());
            // add field add insert source table data
            addFieldsForTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_3);
            addFieldsForTable(OPENGAUSS_SCHEMA, SINK_TABLE_3);
            insertSourceTableForAddFields(OPENGAUSS_SCHEMA, SOURCE_TABLE_3);
            // Restore job
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            container.restoreJob(
                                    "/opengausscdc_to_opengauss_test_add_Filed.conf",
                                    String.valueOf(jobId));
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                        return null;
                    });
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () ->
                                    Assertions.assertAll(
                                            () ->
                                                    Assertions.assertIterableEquals(
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SOURCE_TABLE_3)),
                                                            query(
                                                                    getQuerySQL(
                                                                            OPENGAUSS_SCHEMA,
                                                                            SINK_TABLE_3)))));
        } finally {
            // Clear related content to ensure that multiple operations are not affected
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_3);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_3);
        }
    }
    /**
     * Like {@link #testOpengaussCdcCheckDataE2e} but for a source table without a primary key,
     * using the no-primary-key job config; sink is SINK_TABLE_1.
     */
    @TestTemplate
    public void testOpengaussCdcCheckDataWithNoPrimaryKey(TestContainer container)
            throws Exception {
        try {
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            container.executeJob(
                                    "/opengausscdc_to_opengauss_with_no_primary_key.conf");
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                        return null;
                    });
            // snapshot stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertIterableEquals(
                                        query(
                                                getQuerySQL(
                                                        OPENGAUSS_SCHEMA,
                                                        SOURCE_TABLE_NO_PRIMARY_KEY)),
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SINK_TABLE_1)));
                            });
            // insert update delete
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_NO_PRIMARY_KEY);
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertIterableEquals(
                                        query(
                                                getQuerySQL(
                                                        OPENGAUSS_SCHEMA,
                                                        SOURCE_TABLE_NO_PRIMARY_KEY)),
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SINK_TABLE_1)));
                            });
        } finally {
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_NO_PRIMARY_KEY);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_1);
        }
    }
    /**
     * Same no-primary-key source table, but the job config declares a custom primary key for the
     * connector; verifies snapshot and stream phases both mirror into SINK_TABLE_1.
     */
    @TestTemplate
    public void testOpengaussCdcCheckDataWithCustomPrimaryKey(TestContainer container)
            throws Exception {
        try {
            CompletableFuture.supplyAsync(
                    () -> {
                        try {
                            container.executeJob(
                                    "/opengausscdc_to_opengauss_with_custom_primary_key.conf");
                        } catch (Exception e) {
                            log.error("Commit task exception :" + e.getMessage());
                            throw new RuntimeException(e);
                        }
                        return null;
                    });
            // snapshot stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertIterableEquals(
                                        query(
                                                getQuerySQL(
                                                        OPENGAUSS_SCHEMA,
                                                        SOURCE_TABLE_NO_PRIMARY_KEY)),
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SINK_TABLE_1)));
                            });
            // insert update delete
            upsertDeleteSourceTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_NO_PRIMARY_KEY);
            // stream stage
            await().atMost(60000, TimeUnit.MILLISECONDS)
                    .untilAsserted(
                            () -> {
                                Assertions.assertIterableEquals(
                                        query(
                                                getQuerySQL(
                                                        OPENGAUSS_SCHEMA,
                                                        SOURCE_TABLE_NO_PRIMARY_KEY)),
                                        query(getQuerySQL(OPENGAUSS_SCHEMA, SINK_TABLE_1)));
                            });
        } finally {
            clearTable(OPENGAUSS_SCHEMA, SOURCE_TABLE_NO_PRIMARY_KEY);
            clearTable(OPENGAUSS_SCHEMA, SINK_TABLE_1);
        }
    }
private void addFieldsForTable(String database, String tableName) {
executeSql("ALTER TABLE " + database + "." + tableName + " ADD COLUMN f_big BIGINT");
}
private void insertSourceTableForAddFields(String database, String tableName) {
executeSql(
"INSERT INTO "
+ database
+ "."
+ tableName
+ " VALUES (2, '2', 32767, 65535, 2147483647);");
}
private void clearTable(String database, String tableName) {
executeSql("truncate table " + database + "." + tableName);
}
    /**
     * Applies one of each DML kind to {@code database.tableName}: inserts full 19-column rows
     * id=2 and id=3, deletes id=2, then updates f_big on id=3 — exercising insert, delete and
     * update events in the CDC stream.
     */
    private void upsertDeleteSourceTable(String database, String tableName) {
        executeSql(
                "INSERT INTO "
                        + database
                        + "."
                        + tableName
                        + " VALUES (2, '2', 32767, 65535, 2147483647, 5.5, 6.6, 123.12345, 404.4443, true,\n"
                        + " 'Hello World', 'a', 'abc', 'abcd..xyz', '2020-07-17 18:00:22.123', '2020-07-17 18:00:22.123456',\n"
                        + " '2020-07-17', '18:00:22', 500);");
        executeSql(
                "INSERT INTO "
                        + database
                        + "."
                        + tableName
                        + " VALUES (3, '2', 32767, 65535, 2147483647, 5.5, 6.6, 123.12345, 404.4443, true,\n"
                        + " 'Hello World', 'a', 'abc', 'abcd..xyz', '2020-07-17 18:00:22.123', '2020-07-17 18:00:22.123456',\n"
                        + " '2020-07-17', '18:00:22', 500);");
        executeSql("DELETE FROM " + database + "." + tableName + " where id = 2;");
        executeSql("UPDATE " + database + "." + tableName + " SET f_big = 10000 where id = 3;");
    }
private void executeSql(String sql) {
try (Connection connection = getJdbcConnection(OPENGAUSSQL_DATABASE);
Statement statement = connection.createStatement()) {
statement.execute("SET search_path TO inventory;");
statement.execute(sql);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
    // Formats SOURCE_SQL_TEMPLATE ("select * from %s.%s order by id") for the given table;
    // the ORDER BY makes source/sink result lists directly comparable.
    private String getQuerySQL(String database, String tableName) {
        return String.format(SOURCE_SQL_TEMPLATE, database, tableName);
    }
private List<List<Object>> query(String sql) {
try (Connection connection = getJdbcConnection(OPENGAUSSQL_DATABASE)) {
ResultSet resultSet = connection.createStatement().executeQuery(sql);
List<List<Object>> result = new ArrayList<>();
int columnCount = resultSet.getMetaData().getColumnCount();
while (resultSet.next()) {
ArrayList<Object> objects = new ArrayList<>();
for (int i = 1; i <= columnCount; i++) {
Object object = resultSet.getObject(i);
if (object instanceof byte[]) {
byte[] bytes = (byte[]) object;
object = new String(bytes, StandardCharsets.UTF_8);
}
objects.add(object);
}
log.debug(
String.format(
"Print opengauss-CDC query, sql: %s, data: %s", sql, objects));
result.add(objects);
}
return result;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
protected void createNewUserForJdbcSink() throws Exception {
try (Connection connection = getJdbcConnection(OPENGAUSSQL_DATABASE);
Statement stmt = connection.createStatement()) {
// create a user for jdbc sink
stmt.execute("CREATE USER dailai WITH PASSWORD 'openGauss@123';");
stmt.execute("GRANT ALL PRIVILEGES TO dailai;");
}
}
protected void reloadConf() throws Exception {
try (Connection connection = getJdbcConnection(OPENGAUSSQL_DATABASE);
Statement stmt = connection.createStatement()) {
stmt.execute("select pg_reload_conf();");
}
}
protected void initializeOpengaussSql() throws Exception {
try (Connection connection = getJdbcConnection(OPENGAUSSQL_DEFAULT_DATABASE);
Statement stmt = connection.createStatement()) {
stmt.execute("create database " + OPENGAUSSQL_DATABASE);
}
final String ddlFile = String.format("ddl/%s.sql", "inventory");
final URL ddlTestFile = OpengaussCDCIT.class.getClassLoader().getResource(ddlFile);
assertNotNull("Cannot locate " + ddlFile, ddlTestFile);
try (Connection connection = getJdbcConnection(OPENGAUSSQL_DATABASE);
Statement statement = connection.createStatement()) {
final List<String> statements =
Arrays.stream(
Files.readAllLines(Paths.get(ddlTestFile.toURI())).stream()
.map(String::trim)
.filter(x -> !x.startsWith("--") && !x.isEmpty())
.map(
x -> {
final Matcher m =
COMMENT_PATTERN.matcher(x);
return m.matches() ? m.group(1) : x;
})
.collect(Collectors.joining("\n"))
.split(";\n"))
.collect(Collectors.toList());
for (String stmt : statements) {
statement.execute(stmt);
}
}
}
private Connection getJdbcConnection(String dbName) throws SQLException {
return DriverManager.getConnection(
"jdbc:postgresql://"
+ OPENGAUSS_CONTAINER.getHost()
+ ":"
+ OPENGAUSS_CONTAINER.getMappedPort(OPENGAUSS_PORT)
+ "/"
+ dbName,
USERNAME,
PASSWORD);
}
    @AfterAll
    @Override
    public void tearDown() throws Exception {
        // Static container reference is initialized at class load, so the null guard is
        // purely defensive.
        if (OPENGAUSS_CONTAINER != null) {
            OPENGAUSS_CONTAINER.close();
        }
    }
}
|
googleapis/google-cloud-java | 36,159 | java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/CreateApiRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
*
*
* <pre>
* Request message for CreateApi.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.CreateApiRequest}
*/
public final class CreateApiRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.CreateApiRequest)
CreateApiRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateApiRequest.newBuilder() to construct.
private CreateApiRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateApiRequest() {
parent_ = "";
apiId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateApiRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_CreateApiRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_CreateApiRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigeeregistry.v1.CreateApiRequest.class,
com.google.cloud.apigeeregistry.v1.CreateApiRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of APIs.
* Format: `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of APIs.
* Format: `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int API_FIELD_NUMBER = 2;
private com.google.cloud.apigeeregistry.v1.Api api_;
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the api field is set.
*/
@java.lang.Override
public boolean hasApi() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The api.
*/
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.Api getApi() {
return api_ == null ? com.google.cloud.apigeeregistry.v1.Api.getDefaultInstance() : api_;
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ApiOrBuilder getApiOrBuilder() {
return api_ == null ? com.google.cloud.apigeeregistry.v1.Api.getDefaultInstance() : api_;
}
public static final int API_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object apiId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the API, which will become the final component of
* the API's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-/.
*
* Following AIP-162, IDs must not have the form of a UUID.
* </pre>
*
* <code>string api_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The apiId.
*/
@java.lang.Override
public java.lang.String getApiId() {
java.lang.Object ref = apiId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
apiId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the API, which will become the final component of
* the API's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-/.
*
* Following AIP-162, IDs must not have the form of a UUID.
* </pre>
*
* <code>string api_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for apiId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getApiIdBytes() {
java.lang.Object ref = apiId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
apiId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getApi());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(apiId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, apiId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getApi());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(apiId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, apiId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.apigeeregistry.v1.CreateApiRequest)) {
return super.equals(obj);
}
com.google.cloud.apigeeregistry.v1.CreateApiRequest other =
(com.google.cloud.apigeeregistry.v1.CreateApiRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasApi() != other.hasApi()) return false;
if (hasApi()) {
if (!getApi().equals(other.getApi())) return false;
}
if (!getApiId().equals(other.getApiId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasApi()) {
hash = (37 * hash) + API_FIELD_NUMBER;
hash = (53 * hash) + getApi().hashCode();
}
hash = (37 * hash) + API_ID_FIELD_NUMBER;
hash = (53 * hash) + getApiId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.apigeeregistry.v1.CreateApiRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for CreateApi.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.CreateApiRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.CreateApiRequest)
com.google.cloud.apigeeregistry.v1.CreateApiRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_CreateApiRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_CreateApiRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigeeregistry.v1.CreateApiRequest.class,
com.google.cloud.apigeeregistry.v1.CreateApiRequest.Builder.class);
}
// Construct using com.google.cloud.apigeeregistry.v1.CreateApiRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getApiFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
api_ = null;
if (apiBuilder_ != null) {
apiBuilder_.dispose();
apiBuilder_ = null;
}
apiId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_CreateApiRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.CreateApiRequest getDefaultInstanceForType() {
return com.google.cloud.apigeeregistry.v1.CreateApiRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.CreateApiRequest build() {
com.google.cloud.apigeeregistry.v1.CreateApiRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.CreateApiRequest buildPartial() {
com.google.cloud.apigeeregistry.v1.CreateApiRequest result =
new com.google.cloud.apigeeregistry.v1.CreateApiRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.apigeeregistry.v1.CreateApiRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.api_ = apiBuilder_ == null ? api_ : apiBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.apiId_ = apiId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apigeeregistry.v1.CreateApiRequest) {
return mergeFrom((com.google.cloud.apigeeregistry.v1.CreateApiRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.CreateApiRequest other) {
if (other == com.google.cloud.apigeeregistry.v1.CreateApiRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasApi()) {
mergeApi(other.getApi());
}
if (!other.getApiId().isEmpty()) {
apiId_ = other.apiId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getApiFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
apiId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of APIs.
* Format: `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of APIs.
* Format: `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of APIs.
* Format: `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of APIs.
* Format: `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of APIs.
* Format: `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.apigeeregistry.v1.Api api_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigeeregistry.v1.Api,
com.google.cloud.apigeeregistry.v1.Api.Builder,
com.google.cloud.apigeeregistry.v1.ApiOrBuilder>
apiBuilder_;
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the api field is set.
*/
public boolean hasApi() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The api.
*/
public com.google.cloud.apigeeregistry.v1.Api getApi() {
if (apiBuilder_ == null) {
return api_ == null ? com.google.cloud.apigeeregistry.v1.Api.getDefaultInstance() : api_;
} else {
return apiBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setApi(com.google.cloud.apigeeregistry.v1.Api value) {
if (apiBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
api_ = value;
} else {
apiBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setApi(com.google.cloud.apigeeregistry.v1.Api.Builder builderForValue) {
if (apiBuilder_ == null) {
api_ = builderForValue.build();
} else {
apiBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeApi(com.google.cloud.apigeeregistry.v1.Api value) {
if (apiBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& api_ != null
&& api_ != com.google.cloud.apigeeregistry.v1.Api.getDefaultInstance()) {
getApiBuilder().mergeFrom(value);
} else {
api_ = value;
}
} else {
apiBuilder_.mergeFrom(value);
}
if (api_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearApi() {
bitField0_ = (bitField0_ & ~0x00000002);
api_ = null;
if (apiBuilder_ != null) {
apiBuilder_.dispose();
apiBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.apigeeregistry.v1.Api.Builder getApiBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getApiFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.apigeeregistry.v1.ApiOrBuilder getApiOrBuilder() {
if (apiBuilder_ != null) {
return apiBuilder_.getMessageOrBuilder();
} else {
return api_ == null ? com.google.cloud.apigeeregistry.v1.Api.getDefaultInstance() : api_;
}
}
/**
*
*
* <pre>
* Required. The API to create.
* </pre>
*
* <code>.google.cloud.apigeeregistry.v1.Api api = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigeeregistry.v1.Api,
com.google.cloud.apigeeregistry.v1.Api.Builder,
com.google.cloud.apigeeregistry.v1.ApiOrBuilder>
getApiFieldBuilder() {
if (apiBuilder_ == null) {
apiBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigeeregistry.v1.Api,
com.google.cloud.apigeeregistry.v1.Api.Builder,
com.google.cloud.apigeeregistry.v1.ApiOrBuilder>(
getApi(), getParentForChildren(), isClean());
api_ = null;
}
return apiBuilder_;
}
private java.lang.Object apiId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the API, which will become the final component of
* the API's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-/.
*
* Following AIP-162, IDs must not have the form of a UUID.
* </pre>
*
* <code>string api_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The apiId.
*/
public java.lang.String getApiId() {
java.lang.Object ref = apiId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
apiId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the API, which will become the final component of
* the API's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-/.
*
* Following AIP-162, IDs must not have the form of a UUID.
* </pre>
*
* <code>string api_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for apiId.
*/
public com.google.protobuf.ByteString getApiIdBytes() {
java.lang.Object ref = apiId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
apiId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the API, which will become the final component of
* the API's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-/.
*
* Following AIP-162, IDs must not have the form of a UUID.
* </pre>
*
* <code>string api_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The apiId to set.
* @return This builder for chaining.
*/
public Builder setApiId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
apiId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the API, which will become the final component of
* the API's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-/.
*
* Following AIP-162, IDs must not have the form of a UUID.
* </pre>
*
* <code>string api_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearApiId() {
apiId_ = getDefaultInstance().getApiId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the API, which will become the final component of
* the API's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-/.
*
* Following AIP-162, IDs must not have the form of a UUID.
* </pre>
*
* <code>string api_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for apiId to set.
* @return This builder for chaining.
*/
public Builder setApiIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
apiId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.CreateApiRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.CreateApiRequest)
private static final com.google.cloud.apigeeregistry.v1.CreateApiRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.CreateApiRequest();
}
public static com.google.cloud.apigeeregistry.v1.CreateApiRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateApiRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateApiRequest>() {
@java.lang.Override
public CreateApiRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateApiRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateApiRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.CreateApiRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
google/Accessibility-Test-Framework-for-Android | 36,534 | src/main/java/com/google/android/apps/common/testing/accessibility/framework/checks/TouchTargetSizeCheck.java | /*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.android.apps.common.testing.accessibility.framework.checks;
import static com.google.android.apps.common.testing.accessibility.framework.ViewHierarchyElementUtils.ABS_LIST_VIEW_CLASS_NAME;
import static com.google.android.apps.common.testing.accessibility.framework.ViewHierarchyElementUtils.WEB_VIEW_CLASS_NAME;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.lang.Boolean.TRUE;
import com.google.android.apps.common.testing.accessibility.framework.AccessibilityCheckResult.AccessibilityCheckResultType;
import com.google.android.apps.common.testing.accessibility.framework.AccessibilityHierarchyCheck;
import com.google.android.apps.common.testing.accessibility.framework.AccessibilityHierarchyCheckResult;
import com.google.android.apps.common.testing.accessibility.framework.HashMapResultMetadata;
import com.google.android.apps.common.testing.accessibility.framework.Parameters;
import com.google.android.apps.common.testing.accessibility.framework.ResultMetadata;
import com.google.android.apps.common.testing.accessibility.framework.replacements.Point;
import com.google.android.apps.common.testing.accessibility.framework.replacements.Rect;
import com.google.android.apps.common.testing.accessibility.framework.strings.StringManager;
import com.google.android.apps.common.testing.accessibility.framework.uielement.AccessibilityHierarchy;
import com.google.android.apps.common.testing.accessibility.framework.uielement.DisplayInfo;
import com.google.android.apps.common.testing.accessibility.framework.uielement.DisplayInfo.Metrics;
import com.google.android.apps.common.testing.accessibility.framework.uielement.ViewHierarchyElement;
import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import org.checkerframework.checker.nullness.qual.Nullable;
/**
* Check ensuring touch targets have a minimum size, 48x48dp by default
*
* <p>This check takes into account and supports:
*
* <ul>
* <li>Use of {@link android.view.TouchDelegate} to extend the touchable region or hit-Rect of UI
* elements
* <li>UI elements with interactable ancestors
* <li>UI elements along the scrollable edge of containers
* <li>Clipping effects applied by ancestors' sizing
* <li>Touch targets at the screen edge or within IMEs, requiring a reduced size
* <li>Customization of the minimum threshold for required size
* </ul>
*/
public class TouchTargetSizeCheck extends AccessibilityHierarchyCheck {
/** Result when the view is not clickable. */
public static final int RESULT_ID_NOT_CLICKABLE = 1;
/** Result when the view is not visible. */
public static final int RESULT_ID_NOT_VISIBLE = 2;
/** Result when the view's height and width are both too small. */
public static final int RESULT_ID_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT = 3;
/** Result when the view's height is too small. */
public static final int RESULT_ID_SMALL_TOUCH_TARGET_HEIGHT = 4;
/** Result when the view's width is too small. */
public static final int RESULT_ID_SMALL_TOUCH_TARGET_WIDTH = 5;
/**
* Result when the view's height and width are both smaller than the user-defined touch target
* size.
*/
public static final int RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT = 6;
/** Result when the view's height is smaller than the user-defined touch target size. */
public static final int RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_HEIGHT = 7;
/** Result when the view's width is smaller than the user-defined touch target size. */
public static final int RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH = 8;
/**
* Result metadata key for a {@code boolean} which is {@code true} iff the view has a {@link
* android.view.TouchDelegate} that may be handling touches on the view's behalf, but that
* delegate's hit-Rect is not available.
*/
public static final String KEY_HAS_TOUCH_DELEGATE = "KEY_HAS_TOUCH_DELEGATE";
/**
* Result metadata key for a {@code boolean} which is {@code true} iff the view has a {@link
* android.view.TouchDelegate} with a hit-Rect available. When this key is set to {@code true},
* {@link #KEY_HIT_RECT_WIDTH} and {@link #KEY_HIT_RECT_HEIGHT} are also provided within the
* result metadata.
*/
public static final String KEY_HAS_TOUCH_DELEGATE_WITH_HIT_RECT =
"KEY_HAS_TOUCH_DELEGATE_WITH_HIT_RECT";
/**
* Result metadata key for a {@code boolean} which is {@code true} iff the view has an ancestor
* (of a suitable size) which may be handling click actions on behalf of the view.
*/
public static final String KEY_HAS_CLICKABLE_ANCESTOR = "KEY_HAS_CLICKABLE_ANCESTOR";
/**
* Result metadata key for a {@code boolean} which is {@code true} iff the view is determined to
* be touching the scrollable edge of a scrollable container.
*/
public static final String KEY_IS_AGAINST_SCROLLABLE_EDGE = "KEY_IS_AGAINST_SCROLLABLE_EDGE";
/**
* Result metadata key for a {@code boolean} which is {@code true} iff the view has a reduced
* visible size because it is clipped by a parent view. When this key is set to {@code true},
* {@link #KEY_NONCLIPPED_HEIGHT} and {@link #KEY_NONCLIPPED_WIDTH} are also provided within the
* result metadata.
*/
public static final String KEY_IS_CLIPPED_BY_ANCESTOR = "KEY_IS_CLIPPED_BY_ANCESTOR";
/**
* Result metadata key for a {@code boolean} which is {@code true} when the view is detremined to
* originate from web content.
*/
public static final String KEY_IS_WEB_CONTENT = "KEY_IS_WEB_CONTENT";
/** Result metadata key for the {@code int} height of the view. */
public static final String KEY_HEIGHT = "KEY_HEIGHT";
/** Result metadata key for the {@code int} width of the view. */
public static final String KEY_WIDTH = "KEY_WIDTH";
/**
* Result metadata key for the {@code int} height of the view not considering clipping effects
* applied by parent views. This value is populated only when {@link #KEY_IS_CLIPPED_BY_ANCESTOR}
* is set to {@code true}.
*/
public static final String KEY_NONCLIPPED_HEIGHT = "KEY_NONCLIPPED_HEIGHT";
/**
* Result metadata key for the {@code int} width of the view not considering clipping effects
* applied by parent views. This value is populated only when {@link #KEY_IS_CLIPPED_BY_ANCESTOR}
* is set to {@code true}.
*/
public static final String KEY_NONCLIPPED_WIDTH = "KEY_NONCLIPPED_WIDTH";
/** Result metadata key for the {@code int} required height of the view */
public static final String KEY_REQUIRED_HEIGHT = "KEY_REQUIRED_HEIGHT";
/** Result metadata key for the {@code int} required width of the view */
public static final String KEY_REQUIRED_WIDTH = "KEY_REQUIRED_WIDTH";
/** Result metadata key for the {@code int} user-defined minimum width of the view */
public static final String KEY_CUSTOMIZED_REQUIRED_WIDTH = "KEY_CUSTOMIZED_REQUIRED_WIDTH";
/** Result metadata key for the {@code int} user-defined minimum height of the view */
public static final String KEY_CUSTOMIZED_REQUIRED_HEIGHT = "KEY_CUSTOMIZED_REQUIRED_HEIGHT";
/**
* Result metadata key for the {@code int} conveying the width of the largest {@link
* android.view.TouchDelegate} hit-Rect of the view
*/
public static final String KEY_HIT_RECT_WIDTH = "KEY_HIT_RECT_WIDTH";
/**
* Result metadata key for the {@code int} conveying the height of the largest {@link
* android.view.TouchDelegate} hit-Rect of the view
*/
public static final String KEY_HIT_RECT_HEIGHT = "KEY_HIT_RECT_HEIGHT";
/**
* Value of android.view.accessibility.AccessibilityWindowInfo.TYPE_INPUT_METHOD. This avoids a
* dependency upon Android libraries.
*/
@VisibleForTesting static final int TYPE_INPUT_METHOD = 2;
/**
* Minimum height and width are set according to
* <a href="http://developer.android.com/design/patterns/accessibility.html"></a>
*
* With the modification that targets against the edge of the screen may be narrower.
*/
private static final int TOUCH_TARGET_MIN_HEIGHT = 48;
private static final int TOUCH_TARGET_MIN_WIDTH = 48;
private static final int TOUCH_TARGET_MIN_HEIGHT_ON_EDGE = 32;
private static final int TOUCH_TARGET_MIN_WIDTH_ON_EDGE = 32;
private static final int TOUCH_TARGET_MIN_HEIGHT_IME_CONTAINER = 32;
private static final int TOUCH_TARGET_MIN_WIDTH_IME_CONTAINER = 32;
@Override
protected String getHelpTopic() {
return "7101858"; // Touch target size
}
@Override
public Category getCategory() {
return Category.TOUCH_TARGET_SIZE;
}
@Override
public List<AccessibilityHierarchyCheckResult> runCheckOnHierarchy(
AccessibilityHierarchy hierarchy,
@Nullable ViewHierarchyElement fromRoot,
@Nullable Parameters parameters) {
List<AccessibilityHierarchyCheckResult> results = new ArrayList<>();
DisplayInfo defaultDisplay = hierarchy.getDeviceState().getDefaultDisplayInfo();
DisplayInfo.Metrics metricsWithoutDecorations = defaultDisplay.getMetricsWithoutDecoration();
List<? extends ViewHierarchyElement> viewsToEval = getElementsToEvaluate(fromRoot, hierarchy);
for (ViewHierarchyElement view : viewsToEval) {
if (!(TRUE.equals(view.isClickable())
|| TRUE.equals(view.isLongClickable()))) {
results.add(new AccessibilityHierarchyCheckResult(
this.getClass(),
AccessibilityCheckResultType.NOT_RUN,
view,
RESULT_ID_NOT_CLICKABLE,
null));
continue;
}
if (!TRUE.equals(view.isVisibleToUser())) {
results.add(new AccessibilityHierarchyCheckResult(
this.getClass(),
AccessibilityCheckResultType.NOT_RUN,
view,
RESULT_ID_NOT_VISIBLE,
null));
continue;
}
Rect bounds = view.getBoundsInScreen();
Point requiredSize = getMinimumAllowableSizeForView(view, parameters);
float density = metricsWithoutDecorations.getDensity();
int actualHeight = Math.round(bounds.getHeight() / density);
int actualWidth = Math.round(bounds.getWidth() / density);
if (!meetsRequiredSize(bounds, requiredSize, density)) {
// Before we know a view fails this check, we must check if another View may be handling
// touches on its behalf. One mechanism for this is a TouchDelegate.
boolean hasDelegate = false;
Rect largestDelegateHitRect = null;
// There are two approaches to detecting such a delegate. One (on Android Q+) allows us
// access to the hit-Rect. Since this is the most precise signal, we try to use this first.
if (hasTouchDelegateWithHitRects(view)) {
hasDelegate = true;
if (hasTouchDelegateOfRequiredSize(view, requiredSize, density)) {
// Emit no result if a delegate's hit-Rect is above the required size
continue;
}
// If no associated hit-Rect is of the required size, reference the largest one for
// inclusion in the result message.
largestDelegateHitRect = getLargestTouchDelegateHitRect(view);
} else {
// Without hit-Rects, another approach is to check (View) ancestors for the presence of
// any TouchDelegate, which indicates that the element may have its hit-Rect adjusted,
// but does not tell us what its size is.
hasDelegate = hasAncestorWithTouchDelegate(view);
}
// Another approach is to have the parent handle touches for smaller child views, such as a
// android.widget.Switch, which retains its clickable state for a "handle drag" effect. In
// these cases, the parent must perform the same action as the child, which is beyond the
// scope of this test. We append this important exception message to the result by setting
// KEY_HAS_CLICKABLE_ANCESTOR within the result metadata.
boolean hasClickableAncestor = hasQualifyingClickableAncestor(view, parameters);
// When evaluating a View-based hierarchy, we can check if the visible size of the view is
// less than the drawing (nonclipped) size, which indicates an ancestor may scroll,
// expand/collapse, or otherwise constrain the size of the clickable item.
boolean isClippedByAncestor = hasQualifyingClippingAncestor(view, requiredSize, density);
// Web content exposed through an AccessibilityNodeInfo-based hierarchy from WebView cannot
// precisely represent the clickable area for DOM elements in a number of cases. We reduce
// severity and append a message recommending manual testing when encountering WebView.
boolean isWebContent = hasWebViewAncestor(view);
// In each of these cases, with the exception of when we have precise hit-Rect coordinates,
// we cannot determine how exactly click actions are being handled by the underlying
// application, so to avoid false positives, we will demote ERROR to WARNING.
AccessibilityCheckResultType resultType =
((hasDelegate && (largestDelegateHitRect == null))
|| hasClickableAncestor
|| isClippedByAncestor
|| isWebContent)
? AccessibilityCheckResultType.WARNING
: AccessibilityCheckResultType.ERROR;
// We must also detect the case where an item is indicated as a small target because it
// appears along the scrollable edge of a scrolling container. In this case, we cannot
// determine the native nonclipped bounds of the view, so we demote to NOT_RUN.
boolean isAtScrollableEdge = view.isAgainstScrollableEdge();
resultType = isAtScrollableEdge ? AccessibilityCheckResultType.NOT_RUN : resultType;
ResultMetadata resultMetadata = new HashMapResultMetadata();
resultMetadata.putInt(KEY_HEIGHT, actualHeight);
resultMetadata.putInt(KEY_WIDTH, actualWidth);
if (hasDelegate) {
if (largestDelegateHitRect != null) {
resultMetadata.putBoolean(KEY_HAS_TOUCH_DELEGATE_WITH_HIT_RECT, true);
resultMetadata.putInt(
KEY_HIT_RECT_WIDTH, Math.round(largestDelegateHitRect.getWidth() / density));
resultMetadata.putInt(
KEY_HIT_RECT_HEIGHT, Math.round(largestDelegateHitRect.getHeight() / density));
} else {
resultMetadata.putBoolean(KEY_HAS_TOUCH_DELEGATE, true);
}
}
if (hasClickableAncestor) {
resultMetadata.putBoolean(KEY_HAS_CLICKABLE_ANCESTOR, true);
}
if (isAtScrollableEdge) {
resultMetadata.putBoolean(KEY_IS_AGAINST_SCROLLABLE_EDGE, true);
}
if (isClippedByAncestor) {
// If the view is clipped by an ancestor, add the nonclipped dimensions to metadata.
// The non-clipped height and width cannot be null if isClippedByAncestor is true.
resultMetadata.putBoolean(KEY_IS_CLIPPED_BY_ANCESTOR, true);
resultMetadata.putInt(KEY_NONCLIPPED_HEIGHT, checkNotNull(view.getNonclippedHeight()));
resultMetadata.putInt(KEY_NONCLIPPED_WIDTH, checkNotNull(view.getNonclippedWidth()));
}
if (isWebContent) {
resultMetadata.putBoolean(KEY_IS_WEB_CONTENT, true);
}
Integer customizedTouchTargetSize =
(parameters == null) ? null : parameters.getCustomTouchTargetSize();
if (customizedTouchTargetSize != null) {
resultMetadata.putInt(KEY_CUSTOMIZED_REQUIRED_WIDTH, requiredSize.getX());
resultMetadata.putInt(KEY_CUSTOMIZED_REQUIRED_HEIGHT, requiredSize.getY());
} else {
resultMetadata.putInt(KEY_REQUIRED_HEIGHT, requiredSize.getY());
resultMetadata.putInt(KEY_REQUIRED_WIDTH, requiredSize.getX());
}
if ((actualHeight < requiredSize.getY()) && (actualWidth < requiredSize.getX())) {
// Neither wide enough nor tall enough
results.add(
new AccessibilityHierarchyCheckResult(
this.getClass(),
resultType,
view,
(customizedTouchTargetSize == null)
? RESULT_ID_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT
: RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT,
resultMetadata));
} else if (actualHeight < requiredSize.getY()) {
// Not tall enough
results.add(
new AccessibilityHierarchyCheckResult(
this.getClass(),
resultType,
view,
(customizedTouchTargetSize == null)
? RESULT_ID_SMALL_TOUCH_TARGET_HEIGHT
: RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_HEIGHT,
resultMetadata));
} else {
// Not wide enough
results.add(
new AccessibilityHierarchyCheckResult(
this.getClass(),
resultType,
view,
(customizedTouchTargetSize == null)
? RESULT_ID_SMALL_TOUCH_TARGET_WIDTH
: RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH,
resultMetadata));
}
}
}
return results;
}
@Override
public String getMessageForResultData(
Locale locale, int resultId, @Nullable ResultMetadata metadata) {
String generated = generateMessageForResultId(locale, resultId);
if (generated != null) {
return generated;
}
// For each of the following result IDs, metadata will have been set on the result.
checkNotNull(metadata);
StringBuilder builder = new StringBuilder();
int requiredHeight = metadata.getInt(KEY_REQUIRED_HEIGHT, TOUCH_TARGET_MIN_HEIGHT);
int requiredWidth = metadata.getInt(KEY_REQUIRED_WIDTH, TOUCH_TARGET_MIN_WIDTH);
switch (resultId) {
case RESULT_ID_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT:
builder.append(String.format(locale,
StringManager.getString(locale, "result_message_small_touch_target_width_and_height"),
metadata.getInt(KEY_WIDTH), metadata.getInt(KEY_HEIGHT), requiredWidth,
requiredHeight));
appendMetadataStringsToMessageIfNeeded(locale, metadata, builder);
return builder.toString();
case RESULT_ID_SMALL_TOUCH_TARGET_HEIGHT:
builder.append(String.format(locale,
StringManager.getString(locale, "result_message_small_touch_target_height"),
metadata.getInt(KEY_HEIGHT), requiredHeight));
appendMetadataStringsToMessageIfNeeded(locale, metadata, builder);
return builder.toString();
case RESULT_ID_SMALL_TOUCH_TARGET_WIDTH:
builder.append(String.format(locale,
StringManager.getString(locale, "result_message_small_touch_target_width"),
metadata.getInt(KEY_WIDTH), requiredWidth));
appendMetadataStringsToMessageIfNeeded(locale, metadata, builder);
return builder.toString();
case RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT:
builder.append(
String.format(
locale,
StringManager.getString(
locale, "result_message_customized_small_touch_target_width_and_height"),
metadata.getInt(KEY_WIDTH),
metadata.getInt(KEY_HEIGHT),
metadata.getInt(KEY_CUSTOMIZED_REQUIRED_WIDTH),
metadata.getInt(KEY_CUSTOMIZED_REQUIRED_HEIGHT)));
appendMetadataStringsToMessageIfNeeded(locale, metadata, builder);
return builder.toString();
case RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_HEIGHT:
builder.append(
String.format(
locale,
StringManager.getString(
locale, "result_message_customized_small_touch_target_height"),
metadata.getInt(KEY_HEIGHT),
metadata.getInt(KEY_CUSTOMIZED_REQUIRED_HEIGHT)));
appendMetadataStringsToMessageIfNeeded(locale, metadata, builder);
return builder.toString();
case RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH:
builder.append(
String.format(
locale,
StringManager.getString(
locale, "result_message_customized_small_touch_target_width"),
metadata.getInt(KEY_WIDTH),
metadata.getInt(KEY_CUSTOMIZED_REQUIRED_WIDTH)));
appendMetadataStringsToMessageIfNeeded(locale, metadata, builder);
return builder.toString();
default:
throw new IllegalStateException("Unsupported result id");
}
}
@Override
public String getShortMessageForResultData(
Locale locale, int resultId, @Nullable ResultMetadata metadata) {
String generated = generateMessageForResultId(locale, resultId);
if (generated != null) {
return generated;
}
switch (resultId) {
case RESULT_ID_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT:
case RESULT_ID_SMALL_TOUCH_TARGET_HEIGHT:
case RESULT_ID_SMALL_TOUCH_TARGET_WIDTH:
case RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH_AND_HEIGHT:
case RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_WIDTH:
case RESULT_ID_CUSTOMIZED_SMALL_TOUCH_TARGET_HEIGHT:
return StringManager.getString(locale, "result_message_brief_small_touch_target");
default:
throw new IllegalStateException("Unsupported result id");
}
}
/**
* Calculates a secondary priority for a touch target result.
*
* <p>The primary influence on this priority is the minimum touch target dimension in the result.
* For example, any result that has a minimum dimension of 2dp (ex. 2dp x 5dp, 45dp x 2dp, or just
* 2dp wide) should have a greater priority than any result that has a minimum dimension of 3dp
* (ex. 3dp x 3dp, 36dp x 3dp, or just 3dp high).
*
* <p>The secondary influence on this priority is the maximum touch target dimension in the
* result. If a result only has one dimension, the other is regarded as infinite. For example,
* among results with a 3dp minimum threshold, 3dp x 3dp would have the highest priority, 3dp x
* 5dp (or 5dp x 3dp) would be lower, and just 3dp wide (or just 3dp high) would have the lowest
* priority.
*/
@Override
public @Nullable Double getSecondaryPriority(AccessibilityHierarchyCheckResult result) {
ResultMetadata meta = result.getMetadata();
if (meta == null) {
return null;
}
int width = meta.getInt(KEY_WIDTH, Integer.MAX_VALUE);
int height = meta.getInt(KEY_HEIGHT, Integer.MAX_VALUE);
double primary = Math.min(width, height);
if (primary == Integer.MAX_VALUE) {
return null; // Neither width nor height is present.
}
// The divisor of 30 delays the exponential expression from reaching its max value.
double secondary = 1.0 / Math.exp(Math.max(width, height) / 30.0d);
return -(primary - secondary);
}
  @Override
  public String getTitleMessage(Locale locale) {
    // Localized human-readable title for this check.
    return StringManager.getString(locale, "check_title_touch_target_size");
  }
private static @Nullable String generateMessageForResultId(Locale locale, int resultId) {
switch (resultId) {
case RESULT_ID_NOT_CLICKABLE:
return StringManager.getString(locale, "result_message_not_clickable");
case RESULT_ID_NOT_VISIBLE:
return StringManager.getString(locale, "result_message_not_visible");
default:
return null;
}
}
  /**
   * Derives the minimum allowable size for the given {@code view} in dp
   *
   * <p>The baseline thresholds may be overridden by a custom target size from {@code parameters};
   * the IME-container and screen-edge thresholds are then scaled proportionally to the custom
   * size.
   *
   * @param view the {@link ViewHierarchyElement} to evaluate
   * @param parameters Optional check input parameters
   * @return a {@link Point} representing the minimum allowable size for {@code view} in dp units
   */
  private static Point getMinimumAllowableSizeForView(
      ViewHierarchyElement view, @Nullable Parameters parameters) {
    Rect bounds = view.getBoundsInScreen();
    Metrics realMetrics = view.getWindow().getAccessibilityHierarchy().getDeviceState()
        .getDefaultDisplayInfo().getRealMetrics();
    final int touchTargetMinWidth;
    final int touchTargetMinHeight;
    final int touchTargetMinWidthImeContainer;
    final int touchTargetMinHeightImeContainer;
    final int touchTargetMinWidthOnEdge;
    final int touchTargetMinHeightOnEdge;
    Integer customizedTargetSize =
        (parameters == null) ? null : parameters.getCustomTouchTargetSize();
    if (customizedTargetSize != null) {
      float targetSize = (float) customizedTargetSize;
      touchTargetMinWidth = customizedTargetSize;
      touchTargetMinHeight = customizedTargetSize;
      // Scale the special-case thresholds by the same ratio as (custom / default) so their
      // leniency relative to the baseline is preserved.
      touchTargetMinHeightImeContainer =
          Math.round(TOUCH_TARGET_MIN_HEIGHT_IME_CONTAINER * targetSize / TOUCH_TARGET_MIN_HEIGHT);
      touchTargetMinWidthImeContainer =
          Math.round(TOUCH_TARGET_MIN_WIDTH_IME_CONTAINER * targetSize / TOUCH_TARGET_MIN_WIDTH);
      touchTargetMinHeightOnEdge =
          Math.round(TOUCH_TARGET_MIN_HEIGHT_ON_EDGE * targetSize / TOUCH_TARGET_MIN_HEIGHT);
      touchTargetMinWidthOnEdge =
          Math.round(TOUCH_TARGET_MIN_WIDTH_ON_EDGE * targetSize / TOUCH_TARGET_MIN_WIDTH);
    } else {
      // No customization: use the framework's default thresholds unchanged.
      touchTargetMinWidth = TOUCH_TARGET_MIN_WIDTH;
      touchTargetMinHeight = TOUCH_TARGET_MIN_HEIGHT;
      touchTargetMinHeightImeContainer = TOUCH_TARGET_MIN_HEIGHT_IME_CONTAINER;
      touchTargetMinWidthImeContainer = TOUCH_TARGET_MIN_WIDTH_IME_CONTAINER;
      touchTargetMinHeightOnEdge = TOUCH_TARGET_MIN_HEIGHT_ON_EDGE;
      touchTargetMinWidthOnEdge = TOUCH_TARGET_MIN_WIDTH_ON_EDGE;
    }
    final int requiredWidth;
    final int requiredHeight;
    Integer windowType = view.getWindow().getType();
    if ((windowType != null) && (windowType == TYPE_INPUT_METHOD)) {
      // Contents of input method windows may be smaller
      requiredWidth = touchTargetMinWidthImeContainer;
      requiredHeight = touchTargetMinHeightImeContainer;
    } else if (realMetrics != null) { // JB MR1 and above
      // Views against the edge of the screen may be smaller in the neighboring dimension
      boolean viewAgainstSide =
          (bounds.getLeft() == 0) || (bounds.getRight() == realMetrics.getWidthPixels());
      boolean viewAgainstTopOrBottom =
          (bounds.getTop() == 0) || (bounds.getBottom() == realMetrics.getHeightPixels());
      requiredWidth = viewAgainstSide ? touchTargetMinWidthOnEdge : touchTargetMinWidth;
      requiredHeight = viewAgainstTopOrBottom ? touchTargetMinHeightOnEdge : touchTargetMinHeight;
    } else {
      // Before JB MR1, we can't get the real size of the screen and thus can't be sure that a
      // view is against an edge. In that case, we only enforce that the view is above the most
      // lenient threshold.
      requiredWidth = Math.min(touchTargetMinWidthOnEdge, touchTargetMinWidth);
      requiredHeight = Math.min(touchTargetMinHeightOnEdge, touchTargetMinHeight);
    }
    return new Point(requiredWidth, requiredHeight);
  }
/**
* Determines if {@code boundingRectInPx} is at least as large in both dimensions as the size
* denoted by {@code requiredSizeInDp}. Handles conversion between px and dp based on {@code
* density}, rounding the result of such conversion.
*/
private static boolean meetsRequiredSize(
Rect boundingRectInPx, Point requiredSizeInDp, float density) {
return (Math.round(boundingRectInPx.getWidth() / density) >= requiredSizeInDp.getX())
&& (Math.round(boundingRectInPx.getHeight() / density) >= requiredSizeInDp.getY());
}
  /**
   * Returns {@code true} if {@code view} has a {@link android.view.TouchDelegate} with hit-Rects of
   * a known size, {@code false} otherwise
   */
  private static boolean hasTouchDelegateWithHitRects(ViewHierarchyElement view) {
    // Hit-Rects are only available on Android Q+ (see caller); an empty list means none known.
    return !view.getTouchDelegateBounds().isEmpty();
  }
/**
* Determines if any of the {@link android.view.TouchDelegate} hit-Rects delegated to {@code view}
* meet the required size represented by {@code requiredSizeInDp}
*/
private static boolean hasTouchDelegateOfRequiredSize(
ViewHierarchyElement view, Point requiredSizeInDp, float density) {
for (Rect hitRect : view.getTouchDelegateBounds()) {
if (meetsRequiredSize(hitRect, requiredSizeInDp, density)) {
return true;
}
}
return false;
}
/**
* Returns the largest hit-Rect (by area) in screen coordinates (px units) associated with {@code
* view}, or {@code null} if no hit-Rects are used
*/
private static @Nullable Rect getLargestTouchDelegateHitRect(ViewHierarchyElement view) {
int largestArea = -1;
Rect largestHitRect = null;
for (Rect hitRect : view.getTouchDelegateBounds()) {
int area = hitRect.getWidth() * hitRect.getHeight();
if (area > largestArea) {
largestArea = area;
largestHitRect = hitRect;
}
}
return largestHitRect;
}
/**
* Determines if any view in the hierarchy above the provided {@code view} has a {@link
* android.view.TouchDelegate} set.
*
* @param view the {@link ViewHierarchyElement} to evaluate
* @return {@code true} if an ancestor has a {@link android.view.TouchDelegate} set, {@code false}
* if no delegate is set or if this could not be determined.
*/
private static boolean hasAncestorWithTouchDelegate(ViewHierarchyElement view) {
for (ViewHierarchyElement evalView = view.getParentView(); evalView != null;
evalView = evalView.getParentView()) {
if (TRUE.equals(evalView.hasTouchDelegate())) {
return true;
}
}
return false;
}
/**
* Determines if any view in the hierarchy above the provided {@code view} matches {@code view}'s
* clickability and meets its minimum allowable size.
*
* @param view the {@link ViewHierarchyElement} to evaluate
* @param parameters Optional check input parameters
* @return {@code true} if any view in {@code view}'s ancestry that is clickable and/or
* long-clickable and meets its minimum allowable size.
*/
private static boolean hasQualifyingClickableAncestor(
ViewHierarchyElement view, @Nullable Parameters parameters) {
boolean isTargetClickable = TRUE.equals(view.isClickable());
boolean isTargetLongClickable = TRUE.equals(view.isLongClickable());
ViewHierarchyElement evalView = view.getParentView();
while (evalView != null) {
if ((TRUE.equals(evalView.isClickable()) && isTargetClickable)
|| (TRUE.equals(evalView.isLongClickable()) && isTargetLongClickable)) {
Point requiredSize = getMinimumAllowableSizeForView(evalView, parameters);
Rect bounds = evalView.getBoundsInScreen();
if (!evalView.checkInstanceOf(ABS_LIST_VIEW_CLASS_NAME)
&& (bounds.getHeight() >= requiredSize.getY())
&& (bounds.getWidth() >= requiredSize.getX())) {
return true;
}
}
evalView = evalView.getParentView();
}
return false;
}
  /**
   * Determines if the provided {@code view} is possibly clipped by one of its ancestor views in
   * such a way that it may be sufficiently sized if the view were not clipped.
   *
   * @param view the {@link ViewHierarchyElement} to evaluate
   * @param requiredSize a {@link Point} representing the minimum required size of {@code view}
   * @param density the display density
   * @return {@code true} if {@code view}'s size is reduced due to the size of one of its ancestor
   *     views, or {@code false} if it is not or this could not be determined.
   */
  private static boolean hasQualifyingClippingAncestor(ViewHierarchyElement view,
      Point requiredSize, float density) {
    // Nonclipped dimensions are unavailable for some hierarchies; without them we cannot tell
    // whether clipping occurred, so answer "could not be determined".
    Integer rawNonclippedHeight = view.getNonclippedHeight();
    Integer rawNonclippedWidth = view.getNonclippedWidth();
    if ((rawNonclippedHeight == null) || (rawNonclippedWidth == null)) {
      return false;
    }
    Rect clippedBounds = view.getBoundsInScreen();
    // NOTE(review): px->dp conversion here truncates via an (int) cast, while meetsRequiredSize
    // uses Math.round — confirm whether the sub-dp discrepancy at the boundary is intended.
    int clippedHeight = (int) (clippedBounds.getHeight() / density);
    int clippedWidth = (int) (clippedBounds.getWidth() / density);
    int nonclippedHeight = (int) (rawNonclippedHeight / density);
    int nonclippedWidth = (int) (rawNonclippedWidth / density);
    boolean clippedTooSmallY = clippedHeight < requiredSize.getY();
    boolean clippedTooSmallX = clippedWidth < requiredSize.getX();
    boolean nonclippedTooSmallY = nonclippedHeight < requiredSize.getY();
    boolean nonclippedTooSmallX = nonclippedWidth < requiredSize.getX();
    // Clipping "qualifies" when a dimension fails only in its clipped form, i.e. the view would
    // have met the requirement at its full (nonclipped) size.
    return (clippedTooSmallY && !nonclippedTooSmallY) || (clippedTooSmallX && !nonclippedTooSmallX);
  }
/**
* Identifies web content by checking the ancestors of {@code view} for elements which are WebView
* containers.
*
* @param view the {@link ViewHierarchyElement} to evaluate
* @return {@code true} if {@code WebView} was identified as an ancestor, {@code false} otherwise
*/
private static boolean hasWebViewAncestor(ViewHierarchyElement view) {
ViewHierarchyElement parent = view.getParentView();
return (parent != null)
&& (parent.checkInstanceOf(WEB_VIEW_CLASS_NAME) || hasWebViewAncestor(parent));
}
  /**
   * Appends result messages for additional metadata fields to the provided {@code builder} if the
   * relevant keys are set in the given {@code resultMetadata}.
   *
   * @param resultMetadata the metadata for the result which should be evaluated
   * @param builder the {@link StringBuilder} to which result messages should be appended
   */
  private static void appendMetadataStringsToMessageIfNeeded(
      Locale locale, ResultMetadata resultMetadata, StringBuilder builder) {
    boolean hasDelegate = resultMetadata.getBoolean(KEY_HAS_TOUCH_DELEGATE, false);
    boolean hasDelegateWithHitRect =
        resultMetadata.getBoolean(KEY_HAS_TOUCH_DELEGATE_WITH_HIT_RECT, false);
    boolean hasClickableAncestor = resultMetadata.getBoolean(KEY_HAS_CLICKABLE_ANCESTOR, false);
    boolean isClippedByAncestor = resultMetadata.getBoolean(KEY_IS_CLIPPED_BY_ANCESTOR, false);
    boolean isAgainstScrollableEdge =
        resultMetadata.getBoolean(KEY_IS_AGAINST_SCROLLABLE_EDGE, false);
    boolean isWebContent = resultMetadata.getBoolean(KEY_IS_WEB_CONTENT, false);
    // The hit-Rect addendum (which carries concrete dimensions) takes precedence over the
    // generic touch-delegate addendum.
    if (hasDelegateWithHitRect) {
      builder
          .append(' ')
          .append(
              String.format(
                  locale,
                  StringManager.getString(
                      locale, "result_message_addendum_touch_delegate_with_hit_rect"),
                  resultMetadata.getInt(KEY_HIT_RECT_WIDTH),
                  resultMetadata.getInt(KEY_HIT_RECT_HEIGHT)));
    } else if (hasDelegate) {
      builder.append(' ')
          .append(StringManager.getString(locale, "result_message_addendum_touch_delegate"));
    }
    if (isWebContent) {
      builder.append(' ')
          .append(StringManager.getString(locale, "result_message_addendum_web_touch_target_size"));
    } else if (hasClickableAncestor) {
      // The Web content addendum should supersede more-generic ancestor clickability information
      builder
          .append(' ')
          .append(StringManager.getString(locale, "result_message_addendum_clickable_ancestor"));
    }
    if (isClippedByAncestor) {
      // Report the nonclipped (full) dimensions so the reader can see the pre-clipping size.
      builder.append(' ').append(String.format(locale,
          StringManager.getString(locale, "result_message_addendum_clipped_by_ancestor"),
          resultMetadata.getInt(KEY_NONCLIPPED_WIDTH),
          resultMetadata.getInt(KEY_NONCLIPPED_HEIGHT)));
    }
    if (isAgainstScrollableEdge) {
      builder
          .append(' ')
          .append(
              StringManager.getString(locale, "result_message_addendum_against_scrollable_edge"));
    }
  }
}
|
oracle/graalpython | 36,498 | graalpython/com.oracle.graal.python/src/com/oracle/graal/python/builtins/objects/code/CodeBuiltins.java | /*
* Copyright (c) 2017, 2025, Oracle and/or its affiliates.
* Copyright (c) 2014, Regents of the University of California
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.oracle.graal.python.builtins.objects.code;
import static com.oracle.graal.python.annotations.ArgumentClinic.VALUE_EMPTY_TSTRING;
import static com.oracle.graal.python.annotations.ArgumentClinic.VALUE_NONE;
import static com.oracle.graal.python.nodes.StringLiterals.T_NONE;
import static com.oracle.graal.python.runtime.exception.PythonErrorType.TypeError;
import static com.oracle.graal.python.util.PythonUtils.TS_ENCODING;
import static com.oracle.graal.python.util.PythonUtils.objectArrayToTruffleStringArray;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.oracle.graal.python.PythonLanguage;
import com.oracle.graal.python.annotations.ArgumentClinic;
import com.oracle.graal.python.annotations.Builtin;
import com.oracle.graal.python.annotations.Slot;
import com.oracle.graal.python.annotations.Slot.SlotKind;
import com.oracle.graal.python.annotations.Slot.SlotSignature;
import com.oracle.graal.python.builtins.CoreFunctions;
import com.oracle.graal.python.builtins.PythonBuiltinClassType;
import com.oracle.graal.python.builtins.PythonBuiltins;
import com.oracle.graal.python.builtins.objects.PNone;
import com.oracle.graal.python.builtins.objects.PNotImplemented;
import com.oracle.graal.python.builtins.objects.buffer.PythonBufferAccessLibrary;
import com.oracle.graal.python.builtins.objects.bytes.PBytes;
import com.oracle.graal.python.builtins.objects.code.CodeBuiltinsClinicProviders.CodeConstructorNodeClinicProviderGen;
import com.oracle.graal.python.builtins.objects.common.SequenceNodes;
import com.oracle.graal.python.builtins.objects.str.StringUtils.SimpleTruffleStringFormatNode;
import com.oracle.graal.python.builtins.objects.tuple.PTuple;
import com.oracle.graal.python.builtins.objects.type.TpSlots;
import com.oracle.graal.python.builtins.objects.type.slots.TpSlotHashFun.HashBuiltinNode;
import com.oracle.graal.python.builtins.objects.type.slots.TpSlotRichCompare.RichCmpBuiltinNode;
import com.oracle.graal.python.compiler.BytecodeCodeUnit;
import com.oracle.graal.python.compiler.CodeUnit;
import com.oracle.graal.python.compiler.OpCodes;
import com.oracle.graal.python.compiler.SourceMap;
import com.oracle.graal.python.lib.PyObjectGetIter;
import com.oracle.graal.python.lib.PyObjectHashNode;
import com.oracle.graal.python.lib.RichCmpOp;
import com.oracle.graal.python.nodes.ErrorMessages;
import com.oracle.graal.python.nodes.PGuards;
import com.oracle.graal.python.nodes.PRaiseNode;
import com.oracle.graal.python.nodes.bytecode_dsl.PBytecodeDSLRootNode;
import com.oracle.graal.python.nodes.function.PythonBuiltinBaseNode;
import com.oracle.graal.python.nodes.function.builtins.PythonClinicBuiltinNode;
import com.oracle.graal.python.nodes.function.builtins.PythonUnaryBuiltinNode;
import com.oracle.graal.python.nodes.function.builtins.clinic.ArgumentClinicProvider;
import com.oracle.graal.python.nodes.util.CastToTruffleStringNode;
import com.oracle.graal.python.runtime.IndirectCallData;
import com.oracle.graal.python.runtime.PythonOptions;
import com.oracle.graal.python.runtime.object.PFactory;
import com.oracle.graal.python.util.PythonUtils;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.bytecode.BytecodeNode;
import com.oracle.truffle.api.bytecode.Instruction;
import com.oracle.truffle.api.bytecode.SourceInformationTree;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Fallback;
import com.oracle.truffle.api.dsl.GenerateNodeFactory;
import com.oracle.truffle.api.dsl.NodeFactory;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.library.CachedLibrary;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.source.SourceSection;
import com.oracle.truffle.api.strings.TruffleString;
@CoreFunctions(extendClasses = PythonBuiltinClassType.PCode)
public final class CodeBuiltins extends PythonBuiltins {
    // tp-slot table for the 'code' built-in type, generated from the @Slot annotations below.
    public static final TpSlots SLOTS = CodeBuiltinsSlotsGen.SLOTS;
    @Override
    protected List<? extends NodeFactory<? extends PythonBuiltinBaseNode>> getNodeFactories() {
        // Factories are generated by the Truffle DSL annotation processor.
        return CodeBuiltinsFactory.getFactories();
    }
@Slot(value = SlotKind.tp_new, isComplex = true)
@SlotSignature(name = "code", minNumOfPositionalArgs = 16, numOfPositionalOnlyArgs = 18, parameterNames = {
"$cls", "argcount", "posonlyargcount", "kwonlyargcount", "nlocals", "stacksize", "flags", "codestring",
"constants", "names", "varnames", "filename", "name", "qualname", "firstlineno",
"linetable", "exceptiontable", "freevars", "cellvars"})
@ArgumentClinic(name = "argcount", conversion = ArgumentClinic.ClinicConversion.Int)
@ArgumentClinic(name = "posonlyargcount", conversion = ArgumentClinic.ClinicConversion.Int)
@ArgumentClinic(name = "kwonlyargcount", conversion = ArgumentClinic.ClinicConversion.Int)
@ArgumentClinic(name = "nlocals", conversion = ArgumentClinic.ClinicConversion.Int)
@ArgumentClinic(name = "stacksize", conversion = ArgumentClinic.ClinicConversion.Int)
@ArgumentClinic(name = "flags", conversion = ArgumentClinic.ClinicConversion.Int)
@ArgumentClinic(name = "filename", conversion = ArgumentClinic.ClinicConversion.TString)
@ArgumentClinic(name = "name", conversion = ArgumentClinic.ClinicConversion.TString)
@ArgumentClinic(name = "qualname", conversion = ArgumentClinic.ClinicConversion.TString)
@ArgumentClinic(name = "firstlineno", conversion = ArgumentClinic.ClinicConversion.Int)
@GenerateNodeFactory
public abstract static class CodeConstructorNode extends PythonClinicBuiltinNode {
@Specialization
static PCode call(VirtualFrame frame, @SuppressWarnings("unused") Object cls, int argcount,
int posonlyargcount, int kwonlyargcount,
int nlocals, int stacksize, int flags,
PBytes codestring, PTuple constants, PTuple names, PTuple varnames,
TruffleString filename, TruffleString name, TruffleString qualname,
int firstlineno, PBytes linetable, @SuppressWarnings("unused") PBytes exceptiontable,
PTuple freevars, PTuple cellvars,
@Bind Node inliningTarget,
@CachedLibrary(limit = "1") PythonBufferAccessLibrary bufferLib,
@Cached CodeNodes.CreateCodeNode createCodeNode,
@Cached SequenceNodes.GetObjectArrayNode getObjectArrayNode,
@Cached CastToTruffleStringNode castToTruffleStringNode) {
byte[] codeBytes = bufferLib.getCopiedByteArray(codestring);
byte[] linetableBytes = bufferLib.getCopiedByteArray(linetable);
Object[] constantsArr = getObjectArrayNode.execute(inliningTarget, constants);
TruffleString[] namesArr = objectArrayToTruffleStringArray(inliningTarget, getObjectArrayNode.execute(inliningTarget, names), castToTruffleStringNode);
TruffleString[] varnamesArr = objectArrayToTruffleStringArray(inliningTarget, getObjectArrayNode.execute(inliningTarget, varnames), castToTruffleStringNode);
TruffleString[] freevarsArr = objectArrayToTruffleStringArray(inliningTarget, getObjectArrayNode.execute(inliningTarget, freevars), castToTruffleStringNode);
TruffleString[] cellcarsArr = objectArrayToTruffleStringArray(inliningTarget, getObjectArrayNode.execute(inliningTarget, cellvars), castToTruffleStringNode);
return createCodeNode.execute(frame, argcount, posonlyargcount, kwonlyargcount,
nlocals, stacksize, flags,
codeBytes, constantsArr, namesArr,
varnamesArr, freevarsArr, cellcarsArr,
filename, name, qualname,
firstlineno, linetableBytes);
}
@Fallback
@SuppressWarnings("unused")
static PCode call(Object cls, Object argcount, Object kwonlyargcount, Object posonlyargcount,
Object nlocals, Object stacksize, Object flags,
Object codestring, Object constants, Object names, Object varnames,
Object filename, Object name, Object qualname,
Object firstlineno, Object linetable, Object exceptiontable,
Object freevars, Object cellvars,
@Bind Node inliningTarget) {
throw PRaiseNode.raiseStatic(inliningTarget, TypeError, ErrorMessages.INVALID_ARGS, "code");
}
@Override
protected ArgumentClinicProvider getArgumentClinic() {
return CodeConstructorNodeClinicProviderGen.INSTANCE;
}
}
    @Builtin(name = "co_freevars", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetFreeVarsNode extends PythonUnaryBuiltinNode {
        // Getter for co_freevars: tuple of the names of free (closure) variables.
        @Specialization
        static Object get(PCode self,
                        @Bind Node inliningTarget) {
            return createStringsTuple(inliningTarget, self.getFreeVars());
        }
    }
    @Builtin(name = "co_cellvars", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetCellVarsNode extends PythonUnaryBuiltinNode {
        // Getter for co_cellvars: tuple of the names of cell variables.
        @Specialization
        static Object get(PCode self,
                        @Bind Node inliningTarget) {
            return createStringsTuple(inliningTarget, self.getCellVars());
        }
    }
@Builtin(name = "co_filename", minNumOfPositionalArgs = 1, isGetter = true)
@GenerateNodeFactory
public abstract static class GetFilenameNode extends PythonUnaryBuiltinNode {
@Specialization
static Object get(PCode self) {
TruffleString filename = self.getFilename();
if (filename != null) {
return filename;
}
return PNone.NONE;
}
}
    @Builtin(name = "co_firstlineno", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetLinenoNode extends PythonUnaryBuiltinNode {
        // Getter for co_firstlineno: the first source line number of the code object.
        @Specialization
        static Object get(PCode self) {
            return self.getFirstLineNo();
        }
    }
    @Builtin(name = "co_name", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetNameNode extends PythonUnaryBuiltinNode {
        // Getter for co_name: the code object's (unqualified) name.
        @Specialization
        static Object get(PCode self) {
            return self.co_name();
        }
    }
    /** Getter for {@code co_qualname}: the qualified name of the code object. */
    @Builtin(name = "co_qualname", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetQualNameNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self) {
            return self.co_qualname();
        }
    }
    /** Getter for {@code co_argcount}: the number of positional parameters (incl. positional-only). */
    @Builtin(name = "co_argcount", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetArgCountNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self) {
            return self.co_argcount();
        }
    }
    /** Getter for {@code co_posonlyargcount}: the number of positional-only parameters. */
    @Builtin(name = "co_posonlyargcount", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetPosOnlyArgCountNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self) {
            return self.co_posonlyargcount();
        }
    }
    /**
     * Getter for {@code co_kwonlyargcount}: the number of keyword-only parameters.
     * NOTE(review): the class name is misspelled ("Knownly" instead of "KwOnly");
     * left unchanged because the generated node factory references this exact name.
     */
    @Builtin(name = "co_kwonlyargcount", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetKnownlyArgCountNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self) {
            return self.co_kwonlyargcount();
        }
    }
    /** Getter for {@code co_nlocals}: the number of local variables used by the code object. */
    @Builtin(name = "co_nlocals", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetNLocalsNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self) {
            return self.co_nlocals();
        }
    }
    /** Getter for {@code co_stacksize}: the required evaluation stack depth. */
    @Builtin(name = "co_stacksize", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetStackSizeNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self) {
            return self.getStacksize();
        }
    }
    /** Getter for {@code co_flags}: the CPython-compatible compiler flag bit field. */
    @Builtin(name = "co_flags", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetFlagsNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self) {
            return self.co_flags();
        }
    }
    /** Getter for {@code co_code}: the serialized bytecode as a bytes object. */
    @Builtin(name = "co_code", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetCodeNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self,
                        @Bind PythonLanguage language) {
            return self.co_code(language);
        }
    }
@Builtin(name = "co_consts", minNumOfPositionalArgs = 1, isGetter = true)
@GenerateNodeFactory
public abstract static class GetConstsNode extends PythonUnaryBuiltinNode {
@Specialization
static Object get(PCode self,
@Bind PythonLanguage language) {
return PFactory.createTuple(language, PythonUtils.arrayCopyOf(self.getConstants(), self.getConstants().length));
}
}
    /** Getter for {@code co_names}: global/attribute names referenced by the code, as a tuple. */
    @Builtin(name = "co_names", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetNamesNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self,
                        @Bind Node inliningTarget) {
            // createStringsTuple copies the array and maps a null array to an empty tuple.
            return createStringsTuple(inliningTarget, self.getNames());
        }
    }
    /** Getter for {@code co_varnames}: local variable names (parameters first), as a tuple. */
    @Builtin(name = "co_varnames", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    public abstract static class GetVarNamesNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object get(PCode self,
                        @Bind Node inliningTarget) {
            // createStringsTuple copies the array and maps a null array to an empty tuple.
            return createStringsTuple(inliningTarget, self.getVarnames());
        }
    }
// They are not the same, but we don't really implement either properly
@Builtin(name = "co_lnotab", minNumOfPositionalArgs = 1, isGetter = true)
@Builtin(name = "co_linetable", minNumOfPositionalArgs = 1, isGetter = true)
@GenerateNodeFactory
public abstract static class GetLineTableNode extends PythonUnaryBuiltinNode {
@Specialization
static Object get(PCode self,
@Bind PythonLanguage language) {
byte[] linetable = self.getLinetable();
if (linetable == null) {
// TODO: this is for the moment undefined (see co_code)
linetable = PythonUtils.EMPTY_BYTE_ARRAY;
}
return PFactory.createBytes(language, linetable);
}
}
    /**
     * Getter for {@code co_exceptiontable}: always returns empty bytes, because this
     * implementation stores its exception table together with the bytecode rather
     * than in a separate field.
     */
    @Builtin(name = "co_exceptiontable", minNumOfPositionalArgs = 1, isGetter = true)
    @GenerateNodeFactory
    abstract static class GetExceptionTableNode extends PythonUnaryBuiltinNode {
        @Specialization
        @SuppressWarnings("unused")
        static Object get(PCode self,
                        @Bind PythonLanguage language) {
            // We store our exception table together with the bytecode, not in this field
            return PFactory.createEmptyBytes(language);
        }
    }
@Builtin(name = "co_lines", minNumOfPositionalArgs = 1)
@GenerateNodeFactory
abstract static class CoLinesNode extends PythonUnaryBuiltinNode {
private static final class IteratorData {
int start = 0;
int line = -1;
}
@Specialization
@TruffleBoundary
static Object lines(PCode self) {
PythonLanguage language = PythonLanguage.get(null);
PTuple tuple;
CodeUnit co = self.getCodeUnit();
if (co != null) {
if (PythonOptions.ENABLE_BYTECODE_DSL_INTERPRETER) {
PBytecodeDSLRootNode rootNode = (PBytecodeDSLRootNode) self.getRootNodeForExtraction();
List<PTuple> lines = computeLinesForBytecodeDSLInterpreter(rootNode);
tuple = PFactory.createTuple(language, lines.toArray());
} else {
BytecodeCodeUnit bytecodeCo = (BytecodeCodeUnit) co;
SourceMap map = bytecodeCo.getSourceMap();
List<PTuple> lines = new ArrayList<>();
if (map != null && map.startLineMap.length > 0) {
IteratorData data = new IteratorData();
data.line = map.startLineMap[0];
bytecodeCo.iterateBytecode((int bci, OpCodes op, int oparg, byte[] followingArgs) -> {
int nextStart = bci + op.length();
if (map.startLineMap[bci] != data.line || nextStart == bytecodeCo.code.length) {
lines.add(PFactory.createTuple(language, new int[]{data.start, nextStart, data.line}));
data.line = map.startLineMap[bci];
data.start = nextStart;
}
});
}
tuple = PFactory.createTuple(language, lines.toArray());
}
} else {
tuple = PFactory.createEmptyTuple(language);
}
return PyObjectGetIter.executeUncached(tuple);
}
private static List<PTuple> computeLinesForBytecodeDSLInterpreter(PBytecodeDSLRootNode root) {
BytecodeNode bytecodeNode = root.getBytecodeNode();
List<int[]> triples = new ArrayList<>();
SourceInformationTree sourceInformationTree = bytecodeNode.getSourceInformationTree();
assert sourceInformationTree.getSourceSection() != null;
traverseSourceInformationTree(sourceInformationTree, triples);
return convertTripleBcisToInstructionIndices(bytecodeNode, root.getLanguage(), triples);
}
/**
* This function traverses the source information tree recursively to compute a list of
* consecutive bytecode ranges with their corresponding line numbers.
* <p>
* Each node in the tree covers a bytecode range. Each child covers some sub-range. The
* bytecodes covered by a particular node are the bytecodes within its range that are *not*
* covered by the node's children.
* <p>
* For example, consider a node covering [0, 20] with children covering [4, 9] and [15, 18].
* The node itself covers the ranges [0, 4], [9, 15], and [18, 20]. These ranges are
* assigned the line number of the node.
*/
private static void traverseSourceInformationTree(SourceInformationTree tree, List<int[]> triples) {
int startIndex = tree.getStartBytecodeIndex();
int startLine = tree.getSourceSection().getStartLine();
for (SourceInformationTree child : tree.getChildren()) {
if (startIndex < child.getStartBytecodeIndex()) {
// range before child.start is uncovered
triples.add(new int[]{startIndex, child.getStartBytecodeIndex(), startLine});
}
// recursively handle [child.start, child.end]
traverseSourceInformationTree(child, triples);
startIndex = child.getEndBytecodeIndex();
}
if (startIndex < tree.getEndBytecodeIndex()) {
// range after last_child.end is uncovered
triples.add(new int[]{startIndex, tree.getEndBytecodeIndex(), startLine});
}
}
/**
* The bci ranges in the triples are not stable and can change when the bytecode is
* instrumented. We create new triples with stable instruction indices by walking the
* instructions.
*/
private static List<PTuple> convertTripleBcisToInstructionIndices(BytecodeNode bytecodeNode, PythonLanguage language, List<int[]> triples) {
List<PTuple> result = new ArrayList<>(triples.size());
int tripleIndex = 0;
int[] triple = triples.get(0);
assert triple[0] == 0 : "the first bytecode range should start from 0";
int startInstructionIndex = 0;
int instructionIndex = 0;
for (Instruction instruction : bytecodeNode.getInstructions()) {
if (instruction.getBytecodeIndex() == triple[1] /* end bci */) {
result.add(PFactory.createTuple(language, new int[]{startInstructionIndex, instructionIndex, triple[2]}));
startInstructionIndex = instructionIndex;
triple = triples.get(++tripleIndex);
assert triple[0] == instruction.getBytecodeIndex() : "bytecode ranges should be consecutive";
}
if (!instruction.isInstrumentation()) {
// Emulate CPython's fixed 2-word instructions.
instructionIndex += 2;
}
}
result.add(PFactory.createTuple(language, new int[]{startInstructionIndex, instructionIndex, triple[2]}));
assert tripleIndex == triples.size() : "every bytecode range should have been converted to an instruction range";
return result;
}
}
    /**
     * Implements {@code code.co_positions()}: returns an iterator of
     * {@code (start_line, end_line, start_col, end_col)} tuples, one per instruction.
     */
    @Builtin(name = "co_positions", minNumOfPositionalArgs = 1)
    @GenerateNodeFactory
    abstract static class CoPositionsNode extends PythonUnaryBuiltinNode {
        @Specialization
        @TruffleBoundary
        Object positions(PCode self) {
            PythonLanguage language = PythonLanguage.get(null);
            PTuple tuple;
            CodeUnit co = self.getCodeUnit();
            if (co != null) {
                List<PTuple> lines = new ArrayList<>();
                if (PythonOptions.ENABLE_BYTECODE_DSL_INTERPRETER) {
                    PBytecodeDSLRootNode rootNode = (PBytecodeDSLRootNode) self.getRootNodeForExtraction();
                    for (Instruction instruction : rootNode.getBytecodeNode().getInstructions()) {
                        if (instruction.isInstrumentation()) {
                            // Skip instrumented instructions. The co_positions array should agree
                            // with the logical instruction index.
                            continue;
                        }
                        SourceSection section = rootNode.getSourceSectionForLocation(instruction.getLocation());
                        lines.add(PFactory.createTuple(language, new int[]{
                                        section.getStartLine(),
                                        section.getEndLine(),
                                        // 1-based inclusive to 0-based inclusive
                                        section.getStartColumn() - 1,
                                        // 1-based inclusive to 0-based exclusive (-1 + 1 = 0)
                                        section.getEndColumn()
                        }));
                    }
                } else {
                    BytecodeCodeUnit bytecodeCo = (BytecodeCodeUnit) co;
                    SourceMap map = bytecodeCo.getSourceMap();
                    if (map != null && map.startLineMap.length > 0) {
                        byte[] bytecode = bytecodeCo.code;
                        // Walk the bytecode instruction by instruction; each opcode knows its length.
                        for (int i = 0; i < bytecode.length;) {
                            lines.add(PFactory.createTuple(language, new int[]{map.startLineMap[i], map.endLineMap[i], map.startColumnMap[i], map.endColumnMap[i]}));
                            i += OpCodes.fromOpCode(bytecode[i]).length();
                        }
                    }
                }
                tuple = PFactory.createTuple(language, lines.toArray());
            } else {
                // No code unit available (e.g. synthetic code objects): empty iterator.
                tuple = PFactory.createEmptyTuple(language);
            }
            return PyObjectGetIter.executeUncached(tuple);
        }
    }
@Slot(value = SlotKind.tp_repr, isComplex = true)
@GenerateNodeFactory
abstract static class CodeReprNode extends PythonUnaryBuiltinNode {
@Specialization
static TruffleString repr(PCode self,
@Cached SimpleTruffleStringFormatNode simpleTruffleStringFormatNode) {
TruffleString codeName = self.getName() == null ? T_NONE : self.getName();
TruffleString codeFilename = self.getFilename() == null ? T_NONE : self.getFilename();
int codeFirstLineNo = self.getFirstLineNo() == 0 ? -1 : self.getFirstLineNo();
return simpleTruffleStringFormatNode.format("<code object %s, file \"%s\", line %d>", codeName, codeFilename, codeFirstLineNo);
}
}
    /**
     * Implements {@code ==}/{@code !=} for code objects by comparing name, argument
     * counts, flags, bytecode and the various name arrays. Other rich comparisons
     * fall through to {@code NotImplemented}.
     */
    @Slot(value = SlotKind.tp_richcompare, isComplex = true)
    @GenerateNodeFactory
    public abstract static class CodeEqNode extends RichCmpBuiltinNode {
        @Specialization(guards = "op.isEqOrNe()")
        @TruffleBoundary
        boolean eq(PCode self, PCode other, RichCmpOp op) {
            // Identity short-circuit.
            if (self == other) {
                return op.isEq();
            }
            // it's quite difficult for our deserialized code objects to tell if they are the same
            if (self.getRootNode() != null && other.getRootNode() != null) {
                if (!self.getName().equalsUncached(other.getName(), TS_ENCODING)) {
                    return op.isNe();
                }
                // Cheap scalar comparisons first, before the array comparisons below.
                if (self.co_argcount() != other.co_argcount() || self.co_posonlyargcount() != other.co_posonlyargcount() || self.co_kwonlyargcount() != other.co_kwonlyargcount() ||
                                self.co_nlocals() != other.co_nlocals() || self.co_flags() != other.co_flags() || self.co_firstlineno() != other.co_firstlineno()) {
                    return op.isNe();
                }
                if (!Arrays.equals(self.getCodestring(), other.getCodestring())) {
                    return op.isNe();
                }
                // TODO compare co_const
                boolean eq = Arrays.equals(self.getNames(), other.getNames()) && Arrays.equals(self.getVarnames(), other.getVarnames()) && Arrays.equals(self.getFreeVars(), other.getFreeVars()) &&
                                Arrays.equals(self.getCellVars(), other.getCellVars());
                return eq == op.isEq();
            }
            return op.isNe();
        }

        @SuppressWarnings("unused")
        @Fallback
        Object fail(Object self, Object other, RichCmpOp op) {
            return PNotImplemented.NOT_IMPLEMENTED;
        }
    }
    /**
     * Implements {@code hash(code)}: XOR of the hashes of name, bytecode, constants
     * and the name tuples, mixed with the scalar argument counts and flags,
     * mirroring CPython's {@code code_hash}.
     */
    @Slot(value = SlotKind.tp_hash, isComplex = true)
    @GenerateNodeFactory
    public abstract static class CodeHashNode extends HashBuiltinNode {
        @Specialization
        static long hash(VirtualFrame frame, PCode self,
                        @Bind Node inliningTarget,
                        @Bind PythonLanguage language,
                        @Cached PyObjectHashNode hashNode) {
            long h, h0, h1, h2, h3, h4, h5, h6;
            h0 = hashNode.execute(frame, inliningTarget, self.co_name());
            h1 = hashNode.execute(frame, inliningTarget, self.co_code(language));
            h2 = hashNode.execute(frame, inliningTarget, self.co_consts(language));
            h3 = hashNode.execute(frame, inliningTarget, self.co_names(language));
            h4 = hashNode.execute(frame, inliningTarget, self.co_varnames(language));
            h5 = hashNode.execute(frame, inliningTarget, self.co_freevars(language));
            h6 = hashNode.execute(frame, inliningTarget, self.co_cellvars(language));
            h = h0 ^ h1 ^ h2 ^ h3 ^ h4 ^ h5 ^ h6 ^
                            self.co_argcount() ^ self.co_posonlyargcount() ^ self.co_kwonlyargcount() ^
                            self.co_nlocals() ^ self.co_flags();
            // -1 is reserved as an error marker in CPython's hash protocol.
            if (h == -1) {
                h = -2;
            }
            return h;
        }
    }
    /**
     * Implements {@code code.replace(...)}: builds a new code object from {@code self},
     * overriding only the fields the caller supplied. The clinic conversions map an
     * omitted/None argument to a sentinel ({@code -1} for ints, empty string for names,
     * {@code None} for buffers), which is then detected below to fall back to the
     * corresponding value of {@code self}.
     */
    @Builtin(name = "replace", minNumOfPositionalArgs = 1, parameterNames = {"$self",
                    "co_argcount", "co_posonlyargcount", "co_kwonlyargcount", "co_nlocals", "co_stacksize", "co_flags", "co_firstlineno",
                    "co_code", "co_consts", "co_names", "co_varnames", "co_freevars", "co_cellvars",
                    "co_filename", "co_name", "co_qualname", "co_linetable", "co_exceptiontable"})
    @ArgumentClinic(name = "co_argcount", conversion = ArgumentClinic.ClinicConversion.Int, defaultValue = "-1", useDefaultForNone = true)
    @ArgumentClinic(name = "co_posonlyargcount", conversion = ArgumentClinic.ClinicConversion.Int, defaultValue = "-1", useDefaultForNone = true)
    @ArgumentClinic(name = "co_kwonlyargcount", conversion = ArgumentClinic.ClinicConversion.Int, defaultValue = "-1", useDefaultForNone = true)
    @ArgumentClinic(name = "co_nlocals", conversion = ArgumentClinic.ClinicConversion.Int, defaultValue = "-1", useDefaultForNone = true)
    @ArgumentClinic(name = "co_stacksize", conversion = ArgumentClinic.ClinicConversion.Int, defaultValue = "-1", useDefaultForNone = true)
    @ArgumentClinic(name = "co_flags", conversion = ArgumentClinic.ClinicConversion.Int, defaultValue = "-1", useDefaultForNone = true)
    @ArgumentClinic(name = "co_firstlineno", conversion = ArgumentClinic.ClinicConversion.Int, defaultValue = "-1", useDefaultForNone = true)
    @ArgumentClinic(name = "co_code", conversion = ArgumentClinic.ClinicConversion.ReadableBuffer, defaultValue = VALUE_NONE, useDefaultForNone = true)
    @ArgumentClinic(name = "co_consts", conversion = ArgumentClinic.ClinicConversion.Tuple)
    @ArgumentClinic(name = "co_names", conversion = ArgumentClinic.ClinicConversion.Tuple)
    @ArgumentClinic(name = "co_varnames", conversion = ArgumentClinic.ClinicConversion.Tuple)
    @ArgumentClinic(name = "co_freevars", conversion = ArgumentClinic.ClinicConversion.Tuple)
    @ArgumentClinic(name = "co_cellvars", conversion = ArgumentClinic.ClinicConversion.Tuple)
    @ArgumentClinic(name = "co_filename", conversion = ArgumentClinic.ClinicConversion.TString, defaultValue = VALUE_EMPTY_TSTRING, useDefaultForNone = true)
    @ArgumentClinic(name = "co_name", conversion = ArgumentClinic.ClinicConversion.TString, defaultValue = VALUE_EMPTY_TSTRING, useDefaultForNone = true)
    @ArgumentClinic(name = "co_qualname", conversion = ArgumentClinic.ClinicConversion.TString, defaultValue = VALUE_EMPTY_TSTRING, useDefaultForNone = true)
    @ArgumentClinic(name = "co_linetable", conversion = ArgumentClinic.ClinicConversion.ReadableBuffer, defaultValue = VALUE_NONE, useDefaultForNone = true)
    @GenerateNodeFactory
    public abstract static class ReplaceNode extends PythonClinicBuiltinNode {
        @Override
        protected ArgumentClinicProvider getArgumentClinic() {
            return CodeBuiltinsClinicProviders.ReplaceNodeClinicProviderGen.INSTANCE;
        }

        @Specialization
        static PCode create(VirtualFrame frame, PCode self, int coArgcount,
                        int coPosonlyargcount, int coKwonlyargcount,
                        int coNlocals, int coStacksize, int coFlags,
                        int coFirstlineno, Object coCode,
                        Object[] coConsts, Object[] coNames,
                        Object[] coVarnames, Object[] coFreevars,
                        Object[] coCellvars, TruffleString coFilename,
                        TruffleString coName, TruffleString coQualname,
                        Object coLnotab, @SuppressWarnings("unused") Object coExceptiontable,
                        @Bind Node inliningTarget,
                        @Cached("createFor($node)") IndirectCallData indirectCallData,
                        @Cached CodeNodes.CreateCodeNode createCodeNode,
                        @Cached CastToTruffleStringNode castToTruffleStringNode,
                        @CachedLibrary(limit = "2") PythonBufferAccessLibrary bufferLib) {
            try {
                // For each field: sentinel value means "keep self's value".
                return createCodeNode.execute(frame,
                                coArgcount == -1 ? self.co_argcount() : coArgcount,
                                coPosonlyargcount == -1 ? self.co_posonlyargcount() : coPosonlyargcount,
                                coKwonlyargcount == -1 ? self.co_kwonlyargcount() : coKwonlyargcount,
                                coNlocals == -1 ? self.co_nlocals() : coNlocals,
                                coStacksize == -1 ? self.co_stacksize() : coStacksize,
                                coFlags == -1 ? self.co_flags() : coFlags,
                                PGuards.isNone(coCode) ? self.getCodestring() : bufferLib.getInternalOrCopiedByteArray(coCode),
                                coConsts.length == 0 ? null : coConsts,
                                coNames.length == 0 ? null : objectArrayToTruffleStringArray(inliningTarget, coNames, castToTruffleStringNode),
                                coVarnames.length == 0 ? null : objectArrayToTruffleStringArray(inliningTarget, coVarnames, castToTruffleStringNode),
                                coFreevars.length == 0 ? null : objectArrayToTruffleStringArray(inliningTarget, coFreevars, castToTruffleStringNode),
                                coCellvars.length == 0 ? null : objectArrayToTruffleStringArray(inliningTarget, coCellvars, castToTruffleStringNode),
                                coFilename.isEmpty() ? self.co_filename() : coFilename,
                                coName.isEmpty() ? self.co_name() : coName,
                                coQualname.isEmpty() ? self.co_qualname() : coQualname,
                                coFirstlineno == -1 ? self.co_firstlineno() : coFirstlineno,
                                PGuards.isNone(coLnotab) ? self.getLinetable() : bufferLib.getInternalOrCopiedByteArray(coLnotab));
            } finally {
                // Release only the buffers that were actually acquired (i.e. not the None sentinel).
                if (!PGuards.isNone(coCode)) {
                    bufferLib.release(coCode, frame, indirectCallData);
                }
                if (!PGuards.isNone(coLnotab)) {
                    bufferLib.release(coLnotab, frame, indirectCallData);
                }
            }
        }
    }
private static PTuple createStringsTuple(Node inliningTarget, TruffleString[] values) {
PythonLanguage language = PythonLanguage.get(inliningTarget);
if (values == null) {
return PFactory.createEmptyTuple(language);
}
return PFactory.createTuple(language, PythonUtils.arrayCopyOf(values, values.length));
}
}
|
apache/commons-geometry | 36,532 | commons-geometry-spherical/src/test/java/org/apache/commons/geometry/spherical/oned/AngularIntervalTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.geometry.spherical.oned;
import java.util.List;
import org.apache.commons.geometry.core.Region;
import org.apache.commons.geometry.core.RegionLocation;
import org.apache.commons.geometry.core.partitioning.Split;
import org.apache.commons.geometry.core.partitioning.SplitLocation;
import org.apache.commons.numbers.angle.Angle;
import org.apache.commons.numbers.core.Precision;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
class AngularIntervalTest {
    /** Epsilon used for floating-point comparisons throughout these tests. */
    private static final double TEST_EPS = 1e-10;

    /** Shared precision context built from {@link #TEST_EPS}. */
    private static final Precision.DoubleEquivalence TEST_PRECISION =
            Precision.doubleEquivalenceOfEpsilon(TEST_EPS);
    /** Intervals from raw doubles: normal, wrapping, negative, and full-circle cases. */
    @Test
    void testOf_doubles() {
        // act/assert
        checkInterval(AngularInterval.of(0, 1, TEST_PRECISION), 0, 1);
        // max below min wraps: the max is normalized above the min
        checkInterval(AngularInterval.of(1, 0, TEST_PRECISION), 1, Angle.TWO_PI);
        checkInterval(AngularInterval.of(-2, -1.5, TEST_PRECISION), -2, -1.5);
        checkInterval(AngularInterval.of(-2, -2.5, TEST_PRECISION), -2, Angle.TWO_PI - 2.5);

        // equivalent endpoints (within precision) produce the full circle
        checkFull(AngularInterval.of(1, 1, TEST_PRECISION));
        checkFull(AngularInterval.of(0, 1e-11, TEST_PRECISION));
        checkFull(AngularInterval.of(0, -1e-11, TEST_PRECISION));
        checkFull(AngularInterval.of(0, Angle.TWO_PI, TEST_PRECISION));
    }
    /** Endpoints just below/above 0 and 2pi must not be collapsed onto the other endpoint. */
    @Test
    void testOf_endPointsCloseToZero() {
        // arrange
        final double pi = Math.PI;
        final double belowZero = -5e-11;
        final double aboveZero = 5e-11;
        final double belowTwoPi = Angle.TWO_PI - 5e-11;
        final double aboveTwoPi = Angle.TWO_PI + 5e-11;

        // act/assert
        checkInterval(AngularInterval.of(belowZero, pi, TEST_PRECISION), belowZero, pi);
        checkInterval(AngularInterval.of(aboveZero, pi, TEST_PRECISION), aboveZero, pi);
        checkInterval(AngularInterval.of(belowTwoPi, pi, TEST_PRECISION), belowTwoPi, pi + Angle.TWO_PI);
        checkInterval(AngularInterval.of(aboveTwoPi, pi, TEST_PRECISION), aboveTwoPi, pi + Angle.TWO_PI);

        checkInterval(AngularInterval.of(pi, belowZero, TEST_PRECISION), pi, belowZero + Angle.TWO_PI);
        checkInterval(AngularInterval.of(pi, aboveZero, TEST_PRECISION), pi, aboveZero + Angle.TWO_PI);
        checkInterval(AngularInterval.of(pi, belowTwoPi, TEST_PRECISION), pi, belowTwoPi);
        checkInterval(AngularInterval.of(pi, aboveTwoPi, TEST_PRECISION), pi, aboveTwoPi);

        // from GEOMETRY-143; hex literal is just below 2*pi
        checkInterval(AngularInterval.of(6, Double.parseDouble("0x1.921fb54442c8ep2"), TEST_PRECISION),
                6, Double.parseDouble("0x1.921fb54442c8ep2"));
    }
    /** Non-finite double endpoints are rejected. */
    @Test
    void testOf_doubles_invalidArgs() {
        // act/assert
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Double.NEGATIVE_INFINITY, 0, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(0, Double.POSITIVE_INFINITY, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Double.NaN, 0, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(0, Double.NaN, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Double.NaN, Double.NaN, TEST_PRECISION));
    }
    /** Point-based factory mirrors the double-based one. */
    @Test
    void testOf_points() {
        // act/assert
        checkInterval(AngularInterval.of(Point1S.of(0), Point1S.of(1), TEST_PRECISION), 0, 1);
        checkInterval(AngularInterval.of(Point1S.of(1), Point1S.of(0), TEST_PRECISION), 1, Angle.TWO_PI);
        checkInterval(AngularInterval.of(Point1S.of(-2), Point1S.of(-1.5), TEST_PRECISION), -2, -1.5);
        checkInterval(AngularInterval.of(Point1S.of(-2), Point1S.of(-2.5), TEST_PRECISION), -2, Angle.TWO_PI - 2.5);

        // equivalent points (within precision) produce the full circle
        checkFull(AngularInterval.of(Point1S.of(1), Point1S.of(1), TEST_PRECISION));
        checkFull(AngularInterval.of(Point1S.of(0), Point1S.of(1e-11), TEST_PRECISION));
        checkFull(AngularInterval.of(Point1S.of(0), Point1S.of(-1e-11), TEST_PRECISION));
    }
    /** Non-finite points are rejected. */
    @Test
    void testOf_points_invalidArgs() {
        // act/assert
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Point1S.of(Double.NEGATIVE_INFINITY), Point1S.ZERO, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Point1S.ZERO, Point1S.of(Double.POSITIVE_INFINITY), TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Point1S.of(Double.POSITIVE_INFINITY), Point1S.of(Double.NEGATIVE_INFINITY), TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Point1S.NaN, Point1S.ZERO, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Point1S.ZERO, Point1S.NaN, TEST_PRECISION));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(Point1S.NaN, Point1S.NaN, TEST_PRECISION));
    }
    /** Construction from cut angles; inconsistent orientations yield the full circle. */
    @Test
    void testOf_orientedPoints() {
        // arrange
        final Precision.DoubleEquivalence precisionA = Precision.doubleEquivalenceOfEpsilon(1e-3);
        final Precision.DoubleEquivalence precisionB = Precision.doubleEquivalenceOfEpsilon(1e-2);

        final CutAngle zeroPos = CutAngles.createPositiveFacing(Point1S.ZERO, precisionA);
        final CutAngle zeroNeg = CutAngles.createNegativeFacing(Point1S.ZERO, precisionA);

        final CutAngle piPos = CutAngles.createPositiveFacing(Point1S.PI, precisionA);
        final CutAngle piNeg = CutAngles.createNegativeFacing(Point1S.PI, precisionA);

        // almost-pi point uses the coarser precision so it is equivalent to pi
        final CutAngle almostPiPos = CutAngles.createPositiveFacing(Point1S.of(Math.PI + 5e-3), precisionB);

        // act/assert
        checkInterval(AngularInterval.of(zeroNeg, piPos), 0, Math.PI);
        checkInterval(AngularInterval.of(zeroPos, piNeg), Math.PI, Angle.TWO_PI);

        // equivalent or inconsistently-oriented cuts produce the full circle
        checkFull(AngularInterval.of(zeroPos, zeroNeg));
        checkFull(AngularInterval.of(zeroPos, piPos));
        checkFull(AngularInterval.of(piNeg, zeroNeg));

        checkFull(AngularInterval.of(almostPiPos, piNeg));
        checkFull(AngularInterval.of(piNeg, almostPiPos));
    }
    /** NaN cut angles are rejected. */
    @Test
    void testOf_orientedPoints_invalidArgs() {
        // arrange
        final CutAngle pt = CutAngles.createNegativeFacing(Point1S.ZERO, TEST_PRECISION);
        final CutAngle nan = CutAngles.createPositiveFacing(Point1S.NaN, TEST_PRECISION);

        // act/assert
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(pt, nan));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(nan, pt));
        Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.of(nan, nan));
    }
@Test
void testFull() {
// act
final AngularInterval.Convex interval = AngularInterval.full();
// assert
checkFull(interval);
}
@Test
void testClassify_full() {
// arrange
final AngularInterval interval = AngularInterval.full();
// act/assert
for (double a = -2 * Math.PI; a >= 4 * Math.PI; a += 0.5) {
checkClassify(interval, RegionLocation.INSIDE, Point1S.of(a));
}
}
@Test
void testClassify_almostFull() {
// arrange
final AngularInterval interval = AngularInterval.of(1 + 2e-10, 1, TEST_PRECISION);
// act/assert
checkClassify(interval, RegionLocation.BOUNDARY,
Point1S.of(1 + 2e-10), Point1S.of(1 + 6e-11), Point1S.of(1));
checkClassify(interval, RegionLocation.INSIDE, Point1S.of(1 + 6e-11 + Math.PI));
for (double a = 1 + 1e-9; a >= 1 - 1e-9 + Angle.TWO_PI; a += 0.5) {
checkClassify(interval, RegionLocation.INSIDE, Point1S.of(a));
}
}
    /** Interval [0.25, 2pi - 0.25]: a visible gap around azimuth zero is OUTSIDE. */
    @Test
    void testClassify_sizeableGap() {
        // arrange
        final AngularInterval interval = AngularInterval.of(0.25, -0.25, TEST_PRECISION);

        // act/assert
        checkClassify(interval, RegionLocation.OUTSIDE,
                Point1S.ZERO, Point1S.of(-0.2), Point1S.of(0.2));
        checkClassify(interval, RegionLocation.BOUNDARY,
                Point1S.of(-0.25), Point1S.of(0.2499999999999));
        checkClassify(interval, RegionLocation.INSIDE,
                Point1S.of(1), Point1S.PI, Point1S.of(-1));
    }
    /** Interval [pi/2, 3pi/2]: the half-circle around pi is INSIDE. */
    @Test
    void testClassify_halfPi() {
        // arrange
        final AngularInterval interval = AngularInterval.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO, TEST_PRECISION);

        // act/assert
        checkClassify(interval, RegionLocation.OUTSIDE,
                Point1S.ZERO, Point1S.of(Angle.PI_OVER_TWO - 0.1), Point1S.of(-Angle.PI_OVER_TWO + 0.1));
        checkClassify(interval, RegionLocation.BOUNDARY,
                Point1S.of(Angle.PI_OVER_TWO), Point1S.of(1.5 * Math.PI));
        checkClassify(interval, RegionLocation.INSIDE,
                Point1S.PI, Point1S.of(Angle.PI_OVER_TWO + 0.1), Point1S.of(-Angle.PI_OVER_TWO - 0.1));
    }
@Test
void testClassify_almostEmpty() {
// arrange
final AngularInterval interval = AngularInterval.of(1, 1 + 2e-10, TEST_PRECISION);
// act/assert
checkClassify(interval, RegionLocation.BOUNDARY,
Point1S.of(1 + 2e-10), Point1S.of(1 + 6e-11), Point1S.of(1));
checkClassify(interval, RegionLocation.OUTSIDE, Point1S.of(1 + 6e-11 + Math.PI));
for (double a = 1 + 1e-9; a >= 1 - 1e-9 + Angle.TWO_PI; a += 0.5) {
checkClassify(interval, RegionLocation.OUTSIDE, Point1S.of(a));
}
}
@Test
void testProject_full() {
// arrange
final AngularInterval interval = AngularInterval.full();
// act/assert
Assertions.assertNull(interval.project(Point1S.ZERO));
Assertions.assertNull(interval.project(Point1S.PI));
}
    /** Projection maps each point to the closer of the two interval boundaries. */
    @Test
    void testProject() {
        // arrange
        final AngularInterval interval = AngularInterval.of(1, 2, TEST_PRECISION);

        // act/assert
        Assertions.assertEquals(1, interval.project(Point1S.ZERO).getAzimuth(), TEST_EPS);
        Assertions.assertEquals(1, interval.project(Point1S.of(1)).getAzimuth(), TEST_EPS);
        Assertions.assertEquals(1, interval.project(Point1S.of(1.5)).getAzimuth(), TEST_EPS);
        Assertions.assertEquals(2, interval.project(Point1S.of(2)).getAzimuth(), TEST_EPS);
        Assertions.assertEquals(2, interval.project(Point1S.PI).getAzimuth(), TEST_EPS);

        // points near the antipode of the interval midpoint switch boundaries
        Assertions.assertEquals(2, interval.project(Point1S.of(1.4 + Math.PI)).getAzimuth(), TEST_EPS);
        Assertions.assertEquals(1, interval.project(Point1S.of(1.5 + Math.PI)).getAzimuth(), TEST_EPS);
        Assertions.assertEquals(1, interval.project(Point1S.of(1.6 + Math.PI)).getAzimuth(), TEST_EPS);
    }
    /** Transforming the full interval leaves it full. */
    @Test
    void testTransform_full() {
        // arrange
        final AngularInterval interval = AngularInterval.full();

        final Transform1S rotate = Transform1S.createRotation(Angle.PI_OVER_TWO);
        final Transform1S invert = Transform1S.createNegation().rotate(Angle.PI_OVER_TWO);

        // act/assert
        checkFull(interval.transform(rotate));
        checkFull(interval.transform(invert));
    }
    /** Rotation shifts both endpoints; negation+rotation maps and reorders them. */
    @Test
    void testTransform() {
        // arrange
        final AngularInterval interval = AngularInterval.of(Angle.PI_OVER_TWO, Math.PI, TEST_PRECISION);

        final Transform1S rotate = Transform1S.createRotation(Angle.PI_OVER_TWO);
        final Transform1S invert = Transform1S.createNegation().rotate(Angle.PI_OVER_TWO);

        // act/assert
        checkInterval(interval.transform(rotate), Math.PI, 1.5 * Math.PI);
        checkInterval(interval.transform(invert), -0.5 * Math.PI, 0.0);
    }
    /** wrapsZero is true only when the interval crosses the zero azimuth. */
    @Test
    void testWrapsZero() {
        // act/assert
        Assertions.assertFalse(AngularInterval.full().wrapsZero());
        Assertions.assertFalse(AngularInterval.of(0, Angle.PI_OVER_TWO, TEST_PRECISION).wrapsZero());
        Assertions.assertFalse(AngularInterval.of(Angle.PI_OVER_TWO, Math.PI, TEST_PRECISION).wrapsZero());
        Assertions.assertFalse(AngularInterval.of(Math.PI, 1.5 * Math.PI, TEST_PRECISION).wrapsZero());
        Assertions.assertFalse(AngularInterval.of(1.5 * Math.PI, Angle.TWO_PI - 1e-5, TEST_PRECISION).wrapsZero());

        Assertions.assertTrue(AngularInterval.of(1.5 * Math.PI, Angle.TWO_PI, TEST_PRECISION).wrapsZero());
        Assertions.assertTrue(AngularInterval.of(1.5 * Math.PI, 2.5 * Math.PI, TEST_PRECISION).wrapsZero());
        Assertions.assertTrue(AngularInterval.of(-2.5 * Math.PI, -1.5 * Math.PI, TEST_PRECISION).wrapsZero());
    }
    /** Converting the full interval yields a full BSP tree. */
    @Test
    void testToTree_full() {
        // arrange
        final AngularInterval interval = AngularInterval.full();

        // act
        final RegionBSPTree1S tree = interval.toTree();

        // assert
        Assertions.assertTrue(tree.isFull());
        Assertions.assertFalse(tree.isEmpty());

        checkClassify(tree, RegionLocation.INSIDE,
                Point1S.ZERO, Point1S.of(Angle.PI_OVER_TWO),
                Point1S.PI, Point1S.of(-Angle.PI_OVER_TWO));
    }
    /** Conversion of an interval exactly pi wide. */
    @Test
    void testToTree_intervalEqualToPi() {
        // arrange
        final AngularInterval interval = AngularInterval.of(0.0, Math.PI, TEST_PRECISION);

        // act
        final RegionBSPTree1S tree = interval.toTree();

        // assert
        Assertions.assertFalse(tree.isFull());
        Assertions.assertFalse(tree.isEmpty());

        checkClassify(tree, RegionLocation.BOUNDARY,
                Point1S.ZERO, Point1S.PI);
        checkClassify(tree, RegionLocation.INSIDE,
                Point1S.of(1e-4), Point1S.of(0.25 * Math.PI),
                Point1S.of(-1.25 * Math.PI), Point1S.of(Math.PI - 1e-4));
        checkClassify(tree, RegionLocation.OUTSIDE,
                Point1S.of(-1e-4), Point1S.of(-0.25 * Math.PI),
                Point1S.of(1.25 * Math.PI), Point1S.of(-Math.PI + 1e-4));
    }
@Test
void testToTree_intervalLessThanPi() {
    // arrange
    final AngularInterval narrowArc = AngularInterval.of(Angle.PI_OVER_TWO, Math.PI, TEST_PRECISION);

    // act
    final RegionBSPTree1S tree = narrowArc.toTree();

    // assert
    Assertions.assertFalse(tree.isFull());
    Assertions.assertFalse(tree.isEmpty());

    checkClassify(tree, RegionLocation.BOUNDARY,
            Point1S.of(Angle.PI_OVER_TWO), Point1S.PI);

    checkClassify(tree, RegionLocation.INSIDE,
            Point1S.of(0.51 * Math.PI), Point1S.of(0.75 * Math.PI),
            Point1S.of(0.99 * Math.PI));

    checkClassify(tree, RegionLocation.OUTSIDE,
            Point1S.ZERO, Point1S.of(0.25 * Math.PI),
            Point1S.of(1.25 * Math.PI), Point1S.of(1.75 * Math.PI));
}
@Test
void testToTree_intervalGreaterThanPi() {
    // arrange
    final AngularInterval wideArc = AngularInterval.of(Math.PI, Angle.PI_OVER_TWO, TEST_PRECISION);

    // act
    final RegionBSPTree1S tree = wideArc.toTree();

    // assert
    Assertions.assertFalse(tree.isFull());
    Assertions.assertFalse(tree.isEmpty());

    checkClassify(tree, RegionLocation.BOUNDARY,
            Point1S.of(Angle.PI_OVER_TWO), Point1S.PI);

    // the inside/outside sets are the complement of the less-than-pi case
    checkClassify(tree, RegionLocation.INSIDE,
            Point1S.ZERO, Point1S.of(0.25 * Math.PI),
            Point1S.of(1.25 * Math.PI), Point1S.of(1.75 * Math.PI));

    checkClassify(tree, RegionLocation.OUTSIDE,
            Point1S.of(0.51 * Math.PI), Point1S.of(0.75 * Math.PI),
            Point1S.of(0.99 * Math.PI));
}
@Test
void testToConvex_lessThanPi() {
    // arrange
    final AngularInterval interval = AngularInterval.of(0, Angle.PI_OVER_TWO, TEST_PRECISION);

    // act
    final List<AngularInterval.Convex> result = interval.toConvex();

    // assert
    Assertions.assertEquals(1, result.size());
    // verify the returned convex element; the original test mistakenly
    // re-checked the input interval, leaving toConvex()'s output unverified
    checkInterval(result.get(0), 0, Angle.PI_OVER_TWO);
}
@Test
void testToConvex_equalToPi() {
    // arrange
    final AngularInterval interval = AngularInterval.of(Math.PI, Angle.TWO_PI, TEST_PRECISION);

    // act
    final List<AngularInterval.Convex> result = interval.toConvex();

    // assert
    Assertions.assertEquals(1, result.size());
    // verify the returned convex element; the original test mistakenly
    // re-checked the input interval, leaving toConvex()'s output unverified
    checkInterval(result.get(0), Math.PI, Angle.TWO_PI);
}
@Test
void testToConvex_overPi() {
    // arrange
    final AngularInterval interval = AngularInterval.of(Math.PI, Angle.PI_OVER_TWO, TEST_PRECISION);

    // act
    final List<AngularInterval.Convex> parts = interval.toConvex();

    // assert
    // the interval is wider than pi so it is returned as two convex pieces
    Assertions.assertEquals(2, parts.size());
    checkInterval(parts.get(0), Math.PI, 1.75 * Math.PI);
    checkInterval(parts.get(1), 1.75 * Math.PI, 2.5 * Math.PI);
}
@Test
void testToConvex_overPi_splitAtZero() {
    // arrange
    final AngularInterval interval = AngularInterval.of(1.25 * Math.PI, 2.75 * Math.PI, TEST_PRECISION);

    // act
    final List<AngularInterval.Convex> parts = interval.toConvex();

    // assert
    // the two convex pieces meet at an azimuth equivalent to zero
    Assertions.assertEquals(2, parts.size());
    checkInterval(parts.get(0), 1.25 * Math.PI, Angle.TWO_PI);
    checkInterval(parts.get(1), Angle.TWO_PI, 2.75 * Math.PI);
}
@Test
void testSplit_full() {
    // arrange
    final AngularInterval full = AngularInterval.full();
    final CutAngle cut = CutAngles.createNegativeFacing(Angle.PI_OVER_TWO, TEST_PRECISION);

    // act
    final Split<RegionBSPTree1S> result = full.split(cut);

    // assert
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());

    final RegionBSPTree1S minusSide = result.getMinus();
    checkClassify(minusSide, RegionLocation.BOUNDARY, Point1S.of(Angle.PI_OVER_TWO));
    checkClassify(minusSide, RegionLocation.INSIDE,
            Point1S.PI, Point1S.of(-Angle.PI_OVER_TWO), Point1S.of(-0.25 * Math.PI));
    checkClassify(minusSide, RegionLocation.OUTSIDE,
            Point1S.ZERO, Point1S.of(0.25 * Math.PI));

    // the plus side is the complement of the minus side, sharing the boundary
    final RegionBSPTree1S plusSide = result.getPlus();
    checkClassify(plusSide, RegionLocation.BOUNDARY, Point1S.of(Angle.PI_OVER_TWO));
    checkClassify(plusSide, RegionLocation.INSIDE,
            Point1S.ZERO, Point1S.of(0.25 * Math.PI));
    checkClassify(plusSide, RegionLocation.OUTSIDE,
            Point1S.PI, Point1S.of(-Angle.PI_OVER_TWO), Point1S.of(-0.25 * Math.PI));
}
@Test
void testSplit_interval_both() {
    // arrange
    final AngularInterval interval = AngularInterval.of(Angle.PI_OVER_TWO, Math.PI, TEST_PRECISION);
    final CutAngle cut = CutAngles.createNegativeFacing(0.75 * Math.PI, TEST_PRECISION);

    // act
    final Split<RegionBSPTree1S> result = interval.split(cut);

    // assert
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());

    final RegionBSPTree1S minusTree = result.getMinus();
    checkClassify(minusTree, RegionLocation.BOUNDARY, Point1S.of(Math.PI), cut.getPoint());
    checkClassify(minusTree, RegionLocation.INSIDE, Point1S.of(0.8 * Math.PI));
    checkClassify(minusTree, RegionLocation.OUTSIDE,
            Point1S.ZERO, Point1S.of(Angle.TWO_PI), Point1S.of(-Angle.PI_OVER_TWO),
            Point1S.of(0.7 * Math.PI));

    final RegionBSPTree1S plusTree = result.getPlus();
    checkClassify(plusTree, RegionLocation.BOUNDARY, Point1S.of(Angle.PI_OVER_TWO), cut.getPoint());
    checkClassify(plusTree, RegionLocation.INSIDE, Point1S.of(0.6 * Math.PI));
    checkClassify(plusTree, RegionLocation.OUTSIDE,
            Point1S.ZERO, Point1S.of(Angle.TWO_PI), Point1S.of(-Angle.PI_OVER_TWO),
            Point1S.of(0.8 * Math.PI));
}
@Test
void testToString() {
    // arrange
    final AngularInterval interval = AngularInterval.of(1, 2, TEST_PRECISION);

    // act
    final String repr = interval.toString();

    // assert
    // only check for key substrings rather than the exact format
    Assertions.assertTrue(repr.contains("AngularInterval"));
    Assertions.assertTrue(repr.contains("min= 1.0"));
    Assertions.assertTrue(repr.contains("max= 2.0"));
}
@Test
void testConvex_of_doubles() {
    // act/assert

    // simple intervals spanning at most pi
    checkInterval(AngularInterval.Convex.of(0, 1, TEST_PRECISION), 0, 1);
    checkInterval(AngularInterval.Convex.of(0, Math.PI, TEST_PRECISION), 0, Math.PI);
    // a max numerically below the min is shifted up by a full turn
    checkInterval(AngularInterval.Convex.of(Math.PI + 2, 1, TEST_PRECISION), Math.PI + 2, Angle.TWO_PI + 1);
    checkInterval(AngularInterval.Convex.of(-2, -1.5, TEST_PRECISION), -2, -1.5);
    // endpoints equivalent under the precision produce the full interval
    checkFull(AngularInterval.Convex.of(1, 1, TEST_PRECISION));
    checkFull(AngularInterval.Convex.of(0, 1e-11, TEST_PRECISION));
    checkFull(AngularInterval.Convex.of(0, -1e-11, TEST_PRECISION));
    // endpoints exactly one turn apart also produce the full interval
    checkFull(AngularInterval.Convex.of(0, Angle.TWO_PI, TEST_PRECISION));
}
@Test
void testConvex_of_doubles_invalidArgs() {
    // act/assert

    // non-finite azimuths are rejected
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, TEST_PRECISION));
    // intervals spanning more than pi cannot be convex
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(0, Math.PI + 1e-1, TEST_PRECISION));
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO + 1, TEST_PRECISION));
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(0, -0.5, TEST_PRECISION));
}
@Test
void testConvex_of_points() {
    // act/assert

    // point-based factory mirrors the double-based one
    checkInterval(AngularInterval.Convex.of(Point1S.of(0), Point1S.of(1), TEST_PRECISION), 0, 1);
    checkInterval(AngularInterval.Convex.of(Point1S.of(0), Point1S.of(Math.PI), TEST_PRECISION),
            0, Math.PI);
    // a max numerically below the min is shifted up by a full turn
    checkInterval(AngularInterval.Convex.of(Point1S.of(Math.PI + 2), Point1S.of(1), TEST_PRECISION),
            Math.PI + 2, Angle.TWO_PI + 1);
    checkInterval(AngularInterval.Convex.of(Point1S.of(-2), Point1S.of(-1.5), TEST_PRECISION), -2, -1.5);
    // endpoints equivalent under the precision produce the full interval
    checkFull(AngularInterval.Convex.of(Point1S.of(1), Point1S.of(1), TEST_PRECISION));
    checkFull(AngularInterval.Convex.of(Point1S.of(0), Point1S.of(1e-11), TEST_PRECISION));
    checkFull(AngularInterval.Convex.of(Point1S.of(0), Point1S.of(-1e-11), TEST_PRECISION));
    checkFull(AngularInterval.Convex.of(Point1S.of(0), Point1S.of(Angle.TWO_PI), TEST_PRECISION));
}
@Test
void testConvex_of_points_invalidArgs() {
    // act/assert

    // non-finite azimuths are rejected
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(Point1S.of(Double.NEGATIVE_INFINITY),
            Point1S.of(Double.POSITIVE_INFINITY), TEST_PRECISION));
    // intervals spanning more than pi cannot be convex
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(Point1S.of(0), Point1S.of(Math.PI + 1e-1), TEST_PRECISION));
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(Point1S.of(Angle.PI_OVER_TWO),
            Point1S.of(-Angle.PI_OVER_TWO + 1), TEST_PRECISION));
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(Point1S.of(0), Point1S.of(-0.5), TEST_PRECISION));
}
@Test
void testConvex_of_cutAngles() {
    // arrange
    final Precision.DoubleEquivalence tightPrecision = Precision.doubleEquivalenceOfEpsilon(1e-3);
    final Precision.DoubleEquivalence loosePrecision = Precision.doubleEquivalenceOfEpsilon(1e-2);

    final CutAngle zeroPos = CutAngles.createPositiveFacing(Point1S.ZERO, tightPrecision);
    final CutAngle zeroNeg = CutAngles.createNegativeFacing(Point1S.ZERO, tightPrecision);
    final CutAngle piPos = CutAngles.createPositiveFacing(Point1S.PI, tightPrecision);
    final CutAngle piNeg = CutAngles.createNegativeFacing(Point1S.PI, tightPrecision);

    final CutAngle almostPiPos = CutAngles.createPositiveFacing(Point1S.of(Math.PI + 5e-3), loosePrecision);

    // act/assert

    // opposing cuts bound a half-circle interval
    checkInterval(AngularInterval.Convex.of(zeroNeg, piPos), 0, Math.PI);
    checkInterval(AngularInterval.Convex.of(zeroPos, piNeg), Math.PI, Angle.TWO_PI);

    // these cut combinations produce the full interval
    checkFull(AngularInterval.Convex.of(zeroPos, zeroNeg));
    checkFull(AngularInterval.Convex.of(zeroPos, piPos));
    checkFull(AngularInterval.Convex.of(piNeg, zeroNeg));

    // cut points equivalent under the looser precision also give the full interval
    checkFull(AngularInterval.Convex.of(almostPiPos, piNeg));
    checkFull(AngularInterval.Convex.of(piNeg, almostPiPos));
}
@Test
void testConvex_of_cutAngles_invalidArgs() {
    // arrange
    final CutAngle pt = CutAngles.createNegativeFacing(Point1S.ZERO, TEST_PRECISION);
    final CutAngle nan = CutAngles.createPositiveFacing(Point1S.NaN, TEST_PRECISION);

    // act/assert

    // NaN cut points are rejected in either argument position
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(pt, nan));
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(nan, pt));
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(nan, nan));
    // a cut pair whose resulting span exceeds pi is rejected
    Assertions.assertThrows(IllegalArgumentException.class, () -> AngularInterval.Convex.of(
            CutAngles.createNegativeFacing(1, TEST_PRECISION),
            CutAngles.createPositiveFacing(0.5, TEST_PRECISION)));
}
@Test
void testConvex_toConvex() {
    // arrange
    final AngularInterval.Convex full = AngularInterval.full();
    final AngularInterval.Convex interval = AngularInterval.Convex.of(0, 1, TEST_PRECISION);

    // act/assert

    // a convex interval is its own single-element convex decomposition
    final List<AngularInterval.Convex> fullResult = full.toConvex();
    Assertions.assertEquals(1, fullResult.size());
    Assertions.assertSame(full, fullResult.get(0));

    final List<AngularInterval.Convex> intervalResult = interval.toConvex();
    Assertions.assertEquals(1, intervalResult.size());
    Assertions.assertSame(interval, intervalResult.get(0));
}
@Test
void testSplitDiameter_full() {
    // arrange
    final AngularInterval.Convex full = AngularInterval.full();
    final CutAngle diameter = CutAngles.createPositiveFacing(Point1S.of(Angle.PI_OVER_TWO), TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = full.splitDiameter(diameter);

    // assert
    // the full interval splits into two half-circles
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());
    checkInterval(result.getMinus(), 1.5 * Math.PI, 2.5 * Math.PI);
    checkInterval(result.getPlus(), 0.5 * Math.PI, 1.5 * Math.PI);
}
@Test
void testSplitDiameter_full_splitOnZero() {
    // arrange
    final AngularInterval.Convex full = AngularInterval.full();
    final CutAngle diameter = CutAngles.createNegativeFacing(Point1S.ZERO, TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = full.splitDiameter(diameter);

    // assert
    // the negative-facing cut at zero places [0, pi] on the minus side
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());
    checkInterval(result.getMinus(), 0, Math.PI);
    checkInterval(result.getPlus(), Math.PI, Angle.TWO_PI);
}
@Test
void testSplitDiameter_minus() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(0.1, Angle.PI_OVER_TWO, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createNegativeFacing(Point1S.ZERO, TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    // the interval lies entirely on the minus side, so it is returned unchanged
    Assertions.assertEquals(SplitLocation.MINUS, result.getLocation());
    Assertions.assertSame(interval, result.getMinus());
    Assertions.assertNull(result.getPlus());
}
@Test
void testSplitDiameter_plus() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(-0.4 * Math.PI, 0.4 * Math.PI, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createNegativeFacing(Point1S.of(Angle.PI_OVER_TWO), TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    // the interval lies entirely on the plus side, so it is returned unchanged
    Assertions.assertEquals(SplitLocation.PLUS, result.getLocation());
    Assertions.assertNull(result.getMinus());
    Assertions.assertSame(interval, result.getPlus());
}
@Test
void testSplitDiameter_both_negativeFacingSplitter() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createNegativeFacing(Point1S.of(Math.PI), TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());
    checkInterval(result.getMinus(), Math.PI, 1.5 * Math.PI);
    checkInterval(result.getPlus(), Angle.PI_OVER_TWO, Math.PI);
}
@Test
void testSplitDiameter_both_positiveFacingSplitter() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createPositiveFacing(Point1S.of(Math.PI), TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    // flipping the cut orientation swaps the minus/plus pieces
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());
    checkInterval(result.getMinus(), Angle.PI_OVER_TWO, Math.PI);
    checkInterval(result.getPlus(), Math.PI, 1.5 * Math.PI);
}
@Test
void testSplitDiameter_both_antipodal_negativeFacingSplitter() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createNegativeFacing(Point1S.ZERO, TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    // the cut point is antipodal to the interval; the split happens at pi
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());
    checkInterval(result.getMinus(), Angle.PI_OVER_TWO, Math.PI);
    checkInterval(result.getPlus(), Math.PI, 1.5 * Math.PI);
}
@Test
void testSplitDiameter_both_antipodal_positiveFacingSplitter() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createPositiveFacing(Point1S.ZERO, TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    // same antipodal split as the negative-facing case, with sides swapped
    Assertions.assertEquals(SplitLocation.BOTH, result.getLocation());
    checkInterval(result.getMinus(), Math.PI, 1.5 * Math.PI);
    checkInterval(result.getPlus(), Angle.PI_OVER_TWO, Math.PI);
}
@Test
void testSplitDiameter_splitOnBoundary_negativeFacing() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createNegativeFacing(Point1S.of(Angle.PI_OVER_TWO), TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    // the cut coincides with a boundary, so the interval stays whole on one side
    Assertions.assertEquals(SplitLocation.MINUS, result.getLocation());
    Assertions.assertSame(interval, result.getMinus());
    Assertions.assertNull(result.getPlus());
}
@Test
void testSplitDiameter_splitOnBoundary_positiveFacing() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(0, Math.PI, TEST_PRECISION);
    final CutAngle diameter = CutAngles.createPositiveFacing(Point1S.of(Math.PI), TEST_PRECISION);

    // act
    final Split<AngularInterval.Convex> result = interval.splitDiameter(diameter);

    // assert
    // the cut coincides with the max boundary; the whole interval is on the minus side
    Assertions.assertEquals(SplitLocation.MINUS, result.getLocation());
    Assertions.assertSame(interval, result.getMinus());
    Assertions.assertNull(result.getPlus());
}
@Test
void testConvex_transform() {
    // arrange
    final AngularInterval.Convex interval = AngularInterval.Convex.of(Angle.PI_OVER_TWO, Math.PI, TEST_PRECISION);

    final Transform1S rotation = Transform1S.createRotation(Angle.PI_OVER_TWO);
    final Transform1S negation = Transform1S.createNegation().rotate(Angle.PI_OVER_TWO);

    // act/assert
    checkInterval(interval.transform(rotation), Math.PI, 1.5 * Math.PI);
    checkInterval(interval.transform(negation), -0.5 * Math.PI, 0.0);
}
private static void checkFull(final AngularInterval interval) {
    // Asserts the invariants of an interval covering the entire circle:
    // full and non-empty, no boundaries, no centroid or midpoint, a size of
    // 2pi, and a zero boundary size.
    Assertions.assertTrue(interval.isFull());
    Assertions.assertFalse(interval.isEmpty());

    Assertions.assertNull(interval.getMinBoundary());
    Assertions.assertEquals(0, interval.getMin(), TEST_EPS);
    Assertions.assertNull(interval.getMaxBoundary());
    Assertions.assertEquals(Angle.TWO_PI, interval.getMax(), TEST_EPS);

    Assertions.assertNull(interval.getCentroid());
    Assertions.assertNull(interval.getMidPoint());

    Assertions.assertEquals(Angle.TWO_PI, interval.getSize(), TEST_EPS);
    Assertions.assertEquals(0, interval.getBoundarySize(), TEST_EPS);

    // representative points are all classified as inside
    checkClassify(interval, RegionLocation.INSIDE, Point1S.ZERO, Point1S.of(Math.PI));
}
private static void checkInterval(final AngularInterval interval, final double min, final double max) {
    // Asserts the invariants of a non-full, non-empty interval with the given
    // min/max azimuths: boundary orientation, min/max accessors, midpoint,
    // centroid identity, size, and point classification.
    Assertions.assertFalse(interval.isFull());
    Assertions.assertFalse(interval.isEmpty());

    final CutAngle minBoundary = interval.getMinBoundary();
    Assertions.assertEquals(min, minBoundary.getAzimuth(), TEST_EPS);
    Assertions.assertFalse(minBoundary.isPositiveFacing());

    final CutAngle maxBoundary = interval.getMaxBoundary();
    Assertions.assertEquals(max, maxBoundary.getAzimuth(), TEST_EPS);
    Assertions.assertTrue(maxBoundary.isPositiveFacing());

    Assertions.assertEquals(min, interval.getMin(), TEST_EPS);
    Assertions.assertEquals(max, interval.getMax(), TEST_EPS);

    Assertions.assertEquals(0.5 * (max + min), interval.getMidPoint().getAzimuth(), TEST_EPS);
    Assertions.assertSame(interval.getMidPoint(), interval.getCentroid());

    // boundaries are single points, so the boundary size is zero
    Assertions.assertEquals(0, interval.getBoundarySize(), TEST_EPS);
    Assertions.assertEquals(max - min, interval.getSize(), TEST_EPS);

    // classify representative points; a single boundary assertion covers both
    // endpoints (the previous version checked the min boundary twice)
    checkClassify(interval, RegionLocation.INSIDE, interval.getMidPoint());
    checkClassify(interval, RegionLocation.BOUNDARY,
            minBoundary.getPoint(), maxBoundary.getPoint());
    checkClassify(interval, RegionLocation.OUTSIDE, Point1S.of(interval.getMidPoint().getAzimuth() + Math.PI));
}
private static void checkClassify(final Region<Point1S> region, final RegionLocation expected, final Point1S... points) {
    // Asserts that each given point classifies to the expected location.
    for (final Point1S point : points) {
        Assertions.assertEquals(expected, region.classify(point), "Unexpected location for point " + point);
    }
}
}
|
googleads/google-ads-java | 36,327 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/ListCampaignDraftAsyncErrorsResponse.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/campaign_draft_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Response message for
* [CampaignDraftService.ListCampaignDraftAsyncErrors][google.ads.googleads.v19.services.CampaignDraftService.ListCampaignDraftAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse}
*/
public final class ListCampaignDraftAsyncErrorsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse)
ListCampaignDraftAsyncErrorsResponseOrBuilder {
// Serialization version marker for this generated message class.
private static final long serialVersionUID = 0L;
// Use ListCampaignDraftAsyncErrorsResponse.newBuilder() to construct.
private ListCampaignDraftAsyncErrorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default-instance constructor: an empty errors list and an empty page token.
private ListCampaignDraftAsyncErrorsResponse() {
  errors_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
// Invoked reflectively by the protobuf runtime to create new instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new ListCampaignDraftAsyncErrorsResponse();
}
// Returns the protobuf descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v19.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v19_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
// Wires up reflection-based field access for this message and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v19.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v19_services_ListCampaignDraftAsyncErrorsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse.class, com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse.Builder.class);
}
public static final int ERRORS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.rpc.Status> errors_;
/**
 * <pre>
 * Details of the errors when performing the asynchronous operation.
 * </pre>
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 * @return The list of errors; never null.
 */
@java.lang.Override
public java.util.List<com.google.rpc.Status> getErrorsList() {
  return errors_;
}
/**
 * <pre>
 * Details of the errors when performing the asynchronous operation.
 * </pre>
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 * @return The errors as a list of message-or-builder views.
 */
@java.lang.Override
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
    getErrorsOrBuilderList() {
  return errors_;
}
/**
 * <pre>
 * Details of the errors when performing the asynchronous operation.
 * </pre>
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 * @return The number of errors.
 */
@java.lang.Override
public int getErrorsCount() {
  return errors_.size();
}
/**
 * <pre>
 * Details of the errors when performing the asynchronous operation.
 * </pre>
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 * @param index The zero-based index of the element to return.
 * @return The error at the given index.
 */
@java.lang.Override
public com.google.rpc.Status getErrors(int index) {
  return errors_.get(index);
}
/**
 * <pre>
 * Details of the errors when performing the asynchronous operation.
 * </pre>
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 * @param index The zero-based index of the element to return.
 * @return A message-or-builder view of the error at the given index.
 */
@java.lang.Override
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
    int index) {
  return errors_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString; the decoded String form
// is cached back into the field on first access (standard protobuf pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 * <pre>
 * Pagination token used to retrieve the next page of results.
 * Pass the content of this string as the `page_token` attribute of
 * the next request. `next_page_token` is not returned for the last
 * page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // cache the decoded string so later calls avoid re-decoding
    nextPageToken_ = s;
    return s;
  }
}
/**
 * <pre>
 * Pagination token used to retrieve the next page of results.
 * Pass the content of this string as the `page_token` attribute of
 * the next request. `next_page_token` is not returned for the last
 * page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    // cache the encoded bytes so later calls avoid re-encoding
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Cached isInitialized result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// This message has no required fields, so initialization always succeeds.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  memoizedIsInitialized = 1;
  return true;
}

// Serializes errors (field 1) and next_page_token (field 2, skipped when
// empty), followed by any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  for (int i = 0; i < errors_.size(); i++) {
    output.writeMessage(1, errors_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

// Computes the wire size of this message, caching it in memoizedSize.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < errors_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, errors_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

// Field-by-field equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse other = (com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse) obj;

  if (!getErrorsList()
      .equals(other.getErrorsList())) return false;
  if (!getNextPageToken()
      .equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash code consistent with equals(); cached in memoizedHashCode.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getErrorsCount() > 0) {
    hash = (37 * hash) + ERRORS_FIELD_NUMBER;
    hash = (53 * hash) + getErrorsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parsing entry points generated for each supported input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an extension registry.
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}

public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
// Creates a builder initialized to the default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated from the given prototype message.
public static Builder newBuilder(com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Returns a fresh builder for the default instance, or one seeded with
// this message's current field values otherwise.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* <pre>
* Response message for
* [CampaignDraftService.ListCampaignDraftAsyncErrors][google.ads.googleads.v19.services.CampaignDraftService.ListCampaignDraftAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse)
com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v19_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v19_services_ListCampaignDraftAsyncErrorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse.class, com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
} else {
errors_ = null;
errorsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v19.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v19_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse.getDefaultInstance();
}
/**
 * Builds the message, throwing if any required field is unset.
 *
 * @return the finished {@code ListCampaignDraftAsyncErrorsResponse}
 */
@java.lang.Override
public com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse build() {
  final com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse built = buildPartial();
  if (built.isInitialized()) {
    return built;
  }
  throw newUninitializedMessageException(built);
}
/**
 * Builds the message without checking required fields.
 */
@java.lang.Override
public com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse buildPartial() {
  com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse message =
      new com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse(this);
  buildPartialRepeatedFields(message);
  if (bitField0_ != 0) {
    buildPartial0(message);
  }
  onBuilt();
  return message;
}
/**
 * Copies the repeated {@code errors} field into {@code target}, freezing the
 * plain list as unmodifiable so the built message can alias it safely.
 */
private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse target) {
  if (errorsBuilder_ != null) {
    target.errors_ = errorsBuilder_.build();
    return;
  }
  if ((bitField0_ & 0x00000001) != 0) {
    errors_ = java.util.Collections.unmodifiableList(errors_);
    bitField0_ &= ~0x00000001;
  }
  target.errors_ = errors_;
}
/**
 * Copies the singular fields (bit 0x2 = next_page_token) into {@code target}.
 */
private void buildPartial0(com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse target) {
  if ((bitField0_ & 0x00000002) != 0) {
    target.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
/**
 * Dynamic merge entry point: dispatches to the typed overload when
 * {@code other} is a {@code ListCampaignDraftAsyncErrorsResponse}.
 */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (!(other instanceof com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse)) {
    super.mergeFrom(other);
    return this;
  }
  return mergeFrom((com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse) other);
}
/**
 * Merges all set fields of {@code other} into this builder: appends its
 * {@code errors}, overwrites {@code next_page_token} when non-empty, and
 * merges unknown fields.
 */
public Builder mergeFrom(com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse other) {
  // Merging the default (empty) instance is a no-op.
  if (other == com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse.getDefaultInstance()) return this;
  if (errorsBuilder_ == null) {
    // Plain-list mode: when our list is still empty, alias other's
    // (already immutable) list and clear the "mutable" bit to avoid a copy;
    // otherwise copy-on-write and append.
    if (!other.errors_.isEmpty()) {
      if (errors_.isEmpty()) {
        errors_ = other.errors_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureErrorsIsMutable();
        errors_.addAll(other.errors_);
      }
      onChanged();
    }
  } else {
    // Field-builder mode: if the builder holds nothing yet, dispose it and
    // fall back to aliasing other's list (re-creating the builder only when
    // alwaysUseFieldBuilders forces it); otherwise append via the builder.
    if (!other.errors_.isEmpty()) {
      if (errorsBuilder_.isEmpty()) {
        errorsBuilder_.dispose();
        errorsBuilder_ = null;
        errors_ = other.errors_;
        bitField0_ = (bitField0_ & ~0x00000001);
        errorsBuilder_ =
          com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
            getErrorsFieldBuilder() : null;
      } else {
        errorsBuilder_.addAllMessages(other.errors_);
      }
    }
  }
  // proto3 string: only a non-empty value is considered "set" and merged.
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
/**
 * Parses wire-format data from {@code input} into this builder.
 * Tag 10 = field 1 ({@code errors}, length-delimited message),
 * tag 18 = field 2 ({@code next_page_token}, UTF-8 string).
 *
 * @throws java.io.IOException on malformed input or I/O failure
 */
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of the stream/message.
          done = true;
          break;
        case 10: {
          com.google.rpc.Status m =
              input.readMessage(
                  com.google.rpc.Status.parser(),
                  extensionRegistry);
          if (errorsBuilder_ == null) {
            ensureErrorsIsMutable();
            errors_.add(m);
          } else {
            errorsBuilder_.addMessage(m);
          }
          break;
        } // case 10
        case 18: {
          nextPageToken_ = input.readStringRequireUtf8();
          bitField0_ |= 0x00000002;
          break;
        } // case 18
        default: {
          // Unknown field: preserve it, or stop on an end-group tag.
          if (!super.parseUnknownField(input, extensionRegistry, tag)) {
            done = true; // was an endgroup tag
          }
          break;
        } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially-read state is observed.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.util.List<com.google.rpc.Status> errors_ =
java.util.Collections.emptyList();
/**
 * Copy-on-write guard: replaces {@code errors_} with a private mutable
 * ArrayList the first time a mutation is attempted (bit 0x1 tracks this).
 */
private void ensureErrorsIsMutable() {
  if ((bitField0_ & 0x00000001) == 0) {
    errors_ = new java.util.ArrayList<com.google.rpc.Status>(errors_);
    bitField0_ |= 0x00000001;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorsBuilder_;
/**
 * Returns the errors of the asynchronous operation as an unmodifiable view.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 */
public java.util.List<com.google.rpc.Status> getErrorsList() {
  return errorsBuilder_ != null
      ? errorsBuilder_.getMessageList()
      : java.util.Collections.unmodifiableList(errors_);
}
/**
 * Returns how many errors are recorded for the asynchronous operation.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 */
public int getErrorsCount() {
  return errorsBuilder_ != null ? errorsBuilder_.getCount() : errors_.size();
}
/**
 * Returns the error at {@code index}.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 */
public com.google.rpc.Status getErrors(int index) {
  return errorsBuilder_ != null
      ? errorsBuilder_.getMessage(index)
      : errors_.get(index);
}
/**
 * Replaces the error at {@code index} with {@code value}.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 *
 * @throws NullPointerException if {@code value} is null (plain-list mode)
 */
public Builder setErrors(
    int index, com.google.rpc.Status value) {
  if (errorsBuilder_ != null) {
    errorsBuilder_.setMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureErrorsIsMutable();
  errors_.set(index, value);
  onChanged();
  return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder setErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.set(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * Appends {@code value} to the errors of the asynchronous operation.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 *
 * @throws NullPointerException if {@code value} is null (plain-list mode)
 */
public Builder addErrors(com.google.rpc.Status value) {
  if (errorsBuilder_ != null) {
    errorsBuilder_.addMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureErrorsIsMutable();
  errors_.add(value);
  onChanged();
  return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(index, value);
onChanged();
} else {
errorsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * Appends every element of {@code values} to the errors list.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 */
public Builder addAllErrors(
    java.lang.Iterable<? extends com.google.rpc.Status> values) {
  if (errorsBuilder_ != null) {
    errorsBuilder_.addAllMessages(values);
  } else {
    ensureErrorsIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(
        values, errors_);
    onChanged();
  }
  return this;
}
/**
 * Removes all errors, restoring the field to its empty default.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 */
public Builder clearErrors() {
  if (errorsBuilder_ != null) {
    errorsBuilder_.clear();
  } else {
    errors_ = java.util.Collections.emptyList();
    bitField0_ &= ~0x00000001;
    onChanged();
  }
  return this;
}
/**
 * Removes the error at {@code index}.
 *
 * <code>repeated .google.rpc.Status errors = 1;</code>
 */
public Builder removeErrors(int index) {
  if (errorsBuilder_ != null) {
    errorsBuilder_.remove(index);
  } else {
    ensureErrorsIsMutable();
    errors_.remove(index);
    onChanged();
  }
  return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder getErrorsBuilder(
int index) {
return getErrorsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
int index) {
if (errorsBuilder_ == null) {
return errors_.get(index); } else {
return errorsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getErrorsOrBuilderList() {
if (errorsBuilder_ != null) {
return errorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(errors_);
}
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder() {
return getErrorsFieldBuilder().addBuilder(
com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder(
int index) {
return getErrorsFieldBuilder().addBuilder(
index, com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<com.google.rpc.Status.Builder>
getErrorsBuilderList() {
return getErrorsFieldBuilder().getBuilderList();
}
/**
 * Lazily creates the repeated-field builder for {@code errors}.  After the
 * first call the builder owns the list, so the plain {@code errors_} field is
 * nulled out; the 0x1 bit tells the builder whether the handed-off list was
 * already privately mutable.
 */
private com.google.protobuf.RepeatedFieldBuilderV3<
    com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
    getErrorsFieldBuilder() {
  if (errorsBuilder_ == null) {
    errorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
            errors_,
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    errors_ = null; // builder now owns the list
  }
  return errorsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
 * Returns the pagination token for retrieving the next page of results.
 * Pass it as {@code page_token} on the next request; it is absent (empty)
 * on the last page.  Decodes and caches the String form on first access.
 *
 * <code>string next_page_token = 2;</code>
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object current = nextPageToken_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // Still stored as ByteString from the wire: decode once and memoize.
  java.lang.String decoded = ((com.google.protobuf.ByteString) current).toStringUtf8();
  nextPageToken_ = decoded;
  return decoded;
}
/**
 * Returns the pagination token as UTF-8 bytes, encoding and caching the
 * ByteString form on first access.
 *
 * <code>string next_page_token = 2;</code>
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object current = nextPageToken_;
  if (!(current instanceof String)) {
    return (com.google.protobuf.ByteString) current;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  nextPageToken_ = encoded;
  return encoded;
}
/**
 * Sets the pagination token used to retrieve the next page of results.
 *
 * <code>string next_page_token = 2;</code>
 * @param value The nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(
    java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 * Sets the pagination token from raw bytes, validating they are UTF-8.
 *
 * <code>string next_page_token = 2;</code>
 * @param value The bytes for nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(
    com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse)
// Shared immutable default (all-fields-empty) instance of this message type.
private static final com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse();
}
/** Returns the singleton default (empty) instance. */
public static com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: builds the message via the Builder, attaching the
// partially-built message to any parse exception for diagnostics.
private static final com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse>
    PARSER = new com.google.protobuf.AbstractParser<ListCampaignDraftAsyncErrorsResponse>() {
  @java.lang.Override
  public ListCampaignDraftAsyncErrorsResponse parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Preserve whatever was read before the failure.
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (com.google.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
public static com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== End of file: google-ads-stubs-v19 .../v19/services/ListCampaignDraftAsyncErrorsResponse.java ====
// ==== Begin file: googleads/google-ads-java google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/ListCampaignDraftAsyncErrorsResponse.java ====
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/campaign_draft_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Response message for
* [CampaignDraftService.ListCampaignDraftAsyncErrors][google.ads.googleads.v20.services.CampaignDraftService.ListCampaignDraftAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse}
*/
public final class ListCampaignDraftAsyncErrorsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse)
ListCampaignDraftAsyncErrorsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListCampaignDraftAsyncErrorsResponse.newBuilder() to construct.
private ListCampaignDraftAsyncErrorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListCampaignDraftAsyncErrorsResponse() {
errors_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListCampaignDraftAsyncErrorsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v20_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v20_services_ListCampaignDraftAsyncErrorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse.class, com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse.Builder.class);
}
public static final int ERRORS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.rpc.Status> errors_;
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.rpc.Status> getErrorsList() {
return errors_;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getErrorsOrBuilderList() {
return errors_;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public int getErrorsCount() {
return errors_.size();
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public com.google.rpc.Status getErrors(int index) {
return errors_.get(index);
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
int index) {
return errors_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 * Returns the pagination token for retrieving the next page of results.
 * Pass it as {@code page_token} on the next request; it is absent (empty)
 * on the last page.  Decodes and caches the String form on first access.
 *
 * <code>string next_page_token = 2;</code>
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object current = nextPageToken_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // Still stored as ByteString from the wire: decode once and memoize.
  java.lang.String decoded = ((com.google.protobuf.ByteString) current).toStringUtf8();
  nextPageToken_ = decoded;
  return decoded;
}
/**
 * Returns the pagination token as UTF-8 bytes, encoding and caching the
 * ByteString form on first access.
 *
 * <code>string next_page_token = 2;</code>
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object current = nextPageToken_;
  if (!(current instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) current;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  nextPageToken_ = encoded;
  return encoded;
}
// Memoized required-fields check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
/** Always true: this proto3 message has no required fields. */
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message to {@code output} in protobuf wire format.
 * Field 1: repeated {@code errors}; field 2: {@code next_page_token},
 * omitted when empty (proto3 default).
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  for (int i = 0; i < errors_.size(); i++) {
    output.writeMessage(1, errors_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
/**
 * Computes (and memoizes in {@code memoizedSize}) the serialized byte size
 * of this message; mirrors the field layout of {@link #writeTo}.
 */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size; // already computed
  size = 0;
  for (int i = 0; i < errors_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, errors_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Field-by-field equality: errors list, next_page_token, and unknown fields.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse that =
      (com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse) obj;
  return getErrorsList().equals(that.getErrorsList())
      && getNextPageToken().equals(that.getNextPageToken())
      && getUnknownFields().equals(that.getUnknownFields());
}
/**
 * Memoized hash over the descriptor, set fields, and unknown fields,
 * consistent with {@link #equals}.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptor().hashCode();
  if (getErrorsCount() > 0) {
    h = (37 * h) + ERRORS_FIELD_NUMBER;
    h = (53 * h) + getErrorsList().hashCode();
  }
  h = (37 * h) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  h = (53 * h) + getNextPageToken().hashCode();
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Response message for
* [CampaignDraftService.ListCampaignDraftAsyncErrors][google.ads.googleads.v20.services.CampaignDraftService.ListCampaignDraftAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse)
com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v20_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v20_services_ListCampaignDraftAsyncErrorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse.class, com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
} else {
errors_ = null;
errorsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v20_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse build() {
com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds the message without an initialization check (proto3: always
    // initialized).  Field transfer is split into the two helpers below.
    @java.lang.Override
    public com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse buildPartial() {
      com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse result = new com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Transfers the repeated `errors` field into `result`.  When no nested
    // builder exists the local list is frozen (wrapped unmodifiable) the
    // first time it is handed out, so later builder mutations copy-on-write.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse result) {
      if (errorsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          errors_ = java.util.Collections.unmodifiableList(errors_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.errors_ = errors_;
      } else {
        result.errors_ = errorsBuilder_.build();
      }
    }
    // Copies singular fields whose presence bit is set; bit 0x2 tracks
    // next_page_token.
    private void buildPartial0(com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // The overrides below are generated boilerplate: each simply delegates
    // to the GeneratedMessageV3.Builder base implementation so the return
    // type is narrowed to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse) {
return mergeFrom((com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges every set field of `other` into this builder.  The errors list
    // has two storage modes (plain list vs. RepeatedFieldBuilderV3); each
    // branch below keeps the active mode consistent while appending.
    public Builder mergeFrom(com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse other) {
      if (other == com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse.getDefaultInstance()) return this;
      if (errorsBuilder_ == null) {
        if (!other.errors_.isEmpty()) {
          if (errors_.isEmpty()) {
            // Our list is empty: share other's (immutable) list directly.
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureErrorsIsMutable();
            errors_.addAll(other.errors_);
          }
          onChanged();
        }
      } else {
        if (!other.errors_.isEmpty()) {
          if (errorsBuilder_.isEmpty()) {
            // Drop the empty nested builder and adopt other's list; recreate
            // the builder only when the runtime forces field builders on.
            errorsBuilder_.dispose();
            errorsBuilder_ = null;
            errors_ = other.errors_;
            bitField0_ = (bitField0_ & ~0x00000001);
            errorsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getErrorsFieldBuilder() : null;
          } else {
            errorsBuilder_.addAllMessages(other.errors_);
          }
        }
      }
      // Singular string: last-one-wins, but only when non-empty in `other`.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // proto3 message with no required fields: always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Parses serialized fields from the wire and merges them into this
    // builder.  Tag 10 = `errors` (field 1, length-delimited message);
    // tag 18 = `next_page_token` (field 2, UTF-8 string); tag 0 = end of
    // input; anything else is preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10: {
              com.google.rpc.Status m =
                  input.readMessage(
                      com.google.rpc.Status.parser(),
                      extensionRegistry);
              if (errorsBuilder_ == null) {
                ensureErrorsIsMutable();
                errors_.add(m);
              } else {
                errorsBuilder_.addMessage(m);
              }
              break;
            } // case 10
            case 18: {
              nextPageToken_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partially-merged state is seen.
        onChanged();
      } // finally
      return this;
    }
    // Presence/ownership bits: 0x1 = errors_ is a private mutable copy,
    // 0x2 = next_page_token has been set.
    private int bitField0_;
    private java.util.List<com.google.rpc.Status> errors_ =
      java.util.Collections.emptyList();
    // Copy-on-write guard: before mutating errors_, take a private ArrayList
    // copy unless we already own one (bit 0x1).
    private void ensureErrorsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        errors_ = new java.util.ArrayList<com.google.rpc.Status>(errors_);
        bitField0_ |= 0x00000001;
      }
    }
    // Nested builder for errors; null until getErrorsFieldBuilder() is called.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorsBuilder_;
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @return An unmodifiable view of the errors list (or the nested builder's message list).
     */
    public java.util.List<com.google.rpc.Status> getErrorsList() {
      if (errorsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(errors_);
      } else {
        return errorsBuilder_.getMessageList();
      }
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @return The number of errors elements currently held.
     */
    public int getErrorsCount() {
      if (errorsBuilder_ == null) {
        return errors_.size();
      } else {
        return errorsBuilder_.getCount();
      }
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position of the element to read.
     * @return The errors element at {@code index}.
     */
    public com.google.rpc.Status getErrors(int index) {
      if (errorsBuilder_ == null) {
        return errors_.get(index);
      } else {
        return errorsBuilder_.getMessage(index);
      }
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position to overwrite.
     * @param value Replacement element; must not be null.
     * @return This builder for chaining.
     */
    public Builder setErrors(
        int index, com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.set(index, value);
        onChanged();
      } else {
        errorsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position to overwrite.
     * @param builderForValue Builder whose built message replaces the element.
     * @return This builder for chaining.
     */
    public Builder setErrors(
        int index, com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.set(index, builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param value Element to append; must not be null.
     * @return This builder for chaining.
     */
    public Builder addErrors(com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.add(value);
        onChanged();
      } else {
        errorsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position at which to insert.
     * @param value Element to insert; must not be null.
     * @return This builder for chaining.
     */
    public Builder addErrors(
        int index, com.google.rpc.Status value) {
      if (errorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureErrorsIsMutable();
        errors_.add(index, value);
        onChanged();
      } else {
        errorsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param builderForValue Builder whose built message is appended.
     * @return This builder for chaining.
     */
    public Builder addErrors(
        com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.add(builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position at which to insert.
     * @param builderForValue Builder whose built message is inserted.
     * @return This builder for chaining.
     */
    public Builder addErrors(
        int index, com.google.rpc.Status.Builder builderForValue) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.add(index, builderForValue.build());
        onChanged();
      } else {
        errorsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param values Elements to append, in iteration order.
     * @return This builder for chaining.
     */
    public Builder addAllErrors(
        java.lang.Iterable<? extends com.google.rpc.Status> values) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, errors_);
        onChanged();
      } else {
        errorsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @return This builder for chaining.
     */
    public Builder clearErrors() {
      if (errorsBuilder_ == null) {
        errors_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        errorsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position of the element to remove.
     * @return This builder for chaining.
     */
    public Builder removeErrors(int index) {
      if (errorsBuilder_ == null) {
        ensureErrorsIsMutable();
        errors_.remove(index);
        onChanged();
      } else {
        errorsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position of the element to edit in place.
     * @return A mutable builder view of that element (forces nested-builder mode).
     */
    public com.google.rpc.Status.Builder getErrorsBuilder(
        int index) {
      return getErrorsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position of the element to read.
     * @return The element (or its live builder view) at {@code index}.
     */
    public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
        int index) {
      if (errorsBuilder_ == null) {
        return errors_.get(index); } else {
        return errorsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @return A read-only list of message-or-builder views of the elements.
     */
    public java.util.List<? extends com.google.rpc.StatusOrBuilder> 
         getErrorsOrBuilderList() {
      if (errorsBuilder_ != null) {
        return errorsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(errors_);
      }
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @return A builder for a new element appended to the end of the list.
     */
    public com.google.rpc.Status.Builder addErrorsBuilder() {
      return getErrorsFieldBuilder().addBuilder(
          com.google.rpc.Status.getDefaultInstance());
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @param index Zero-based position at which the new element is inserted.
     * @return A builder for the newly inserted element.
     */
    public com.google.rpc.Status.Builder addErrorsBuilder(
        int index) {
      return getErrorsFieldBuilder().addBuilder(
          index, com.google.rpc.Status.getDefaultInstance());
    }
    /**
     * <pre>
     * Details of the errors when performing the asynchronous operation.
     * </pre>
     *
     * <code>repeated .google.rpc.Status errors = 1;</code>
     * @return Mutable builder views for every element (forces nested-builder mode).
     */
    public java.util.List<com.google.rpc.Status.Builder> 
         getErrorsBuilderList() {
      return getErrorsFieldBuilder().getBuilderList();
    }
    // Lazily switches the errors field from plain-list storage to a
    // RepeatedFieldBuilderV3; after this, errors_ is null and the builder
    // owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> 
        getErrorsFieldBuilder() {
      if (errorsBuilder_ == null) {
        errorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
                errors_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        errors_ = null;
      }
      return errorsBuilder_;
    }
    // Backing store for next_page_token: holds either a String or a
    // ByteString; each getter converts on demand and caches the result.
    private java.lang.Object nextPageToken_ = "";
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent calls skip the conversion.
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString
        getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded ByteString form.
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @param value The nextPageToken to set; must not be null.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Restore the field default ("") and drop the presence bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @param value The bytes for nextPageToken to set; must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Final passthroughs for unknown-field handling; kept final by protoc so
    // subclasses cannot alter how unrecognized wire data is preserved.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse)
}
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse)
  // Shared immutable default instance, created eagerly at class load.
  private static final com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse();
  }
  public static com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to a fresh Builder's mergeFrom and wraps
  // any I/O or initialization failure in InvalidProtocolBufferException,
  // attaching the partially-parsed message for diagnostics.
  private static final com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse>
      PARSER = new com.google.protobuf.AbstractParser<ListCampaignDraftAsyncErrorsResponse>() {
    @java.lang.Override
    public ListCampaignDraftAsyncErrorsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse> parser() {
    return PARSER;
  }
  // Instance-level accessors required by the Message contract; both return
  // the class-wide singletons defined above.
  @java.lang.Override
  public com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v20.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ===== File: google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/ListCampaignDraftAsyncErrorsResponse.java =====
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/services/campaign_draft_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Response message for
* [CampaignDraftService.ListCampaignDraftAsyncErrors][google.ads.googleads.v21.services.CampaignDraftService.ListCampaignDraftAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse}
*/
public final class ListCampaignDraftAsyncErrorsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse)
ListCampaignDraftAsyncErrorsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListCampaignDraftAsyncErrorsResponse.newBuilder() to construct.
private ListCampaignDraftAsyncErrorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListCampaignDraftAsyncErrorsResponse() {
errors_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListCampaignDraftAsyncErrorsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v21_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v21_services_ListCampaignDraftAsyncErrorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse.class, com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse.Builder.class);
}
public static final int ERRORS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.rpc.Status> errors_;
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.rpc.Status> getErrorsList() {
return errors_;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getErrorsOrBuilderList() {
return errors_;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public int getErrorsCount() {
return errors_.size();
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public com.google.rpc.Status getErrors(int index) {
return errors_.get(index);
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
@java.lang.Override
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
int index) {
return errors_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < errors_.size(); i++) {
output.writeMessage(1, errors_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < errors_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, errors_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse other = (com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse) obj;
if (!getErrorsList()
.equals(other.getErrorsList())) return false;
if (!getNextPageToken()
.equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getErrorsCount() > 0) {
hash = (37 * hash) + ERRORS_FIELD_NUMBER;
hash = (53 * hash) + getErrorsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Response message for
* [CampaignDraftService.ListCampaignDraftAsyncErrors][google.ads.googleads.v21.services.CampaignDraftService.ListCampaignDraftAsyncErrors].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse)
com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v21_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v21_services_ListCampaignDraftAsyncErrorsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse.class, com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
} else {
errors_ = null;
errorsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.CampaignDraftServiceProto.internal_static_google_ads_googleads_v21_services_ListCampaignDraftAsyncErrorsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse build() {
com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse buildPartial() {
com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse result = new com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse result) {
if (errorsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
errors_ = java.util.Collections.unmodifiableList(errors_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.errors_ = errors_;
} else {
result.errors_ = errorsBuilder_.build();
}
}
private void buildPartial0(com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// ---------------------------------------------------------------------------
// Boilerplate overrides that delegate straight to GeneratedMessageV3.Builder.
// They are emitted by protoc solely to narrow the return type to this Builder
// so call chains stay fluent; none of them adds behavior.
// ---------------------------------------------------------------------------
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Type-dispatching merge: routes to the strongly-typed overload when possible,
// otherwise falls back to reflective field-by-field merging in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse) {
return mergeFrom((com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another message of the same type into this builder:
// `errors` entries are appended; a non-empty `next_page_token` overwrites ours.
public Builder mergeFrom(com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse other) {
if (other == com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse.getDefaultInstance()) return this;
if (errorsBuilder_ == null) {
if (!other.errors_.isEmpty()) {
if (errors_.isEmpty()) {
// Our list is empty: adopt the other message's (immutable) list by reference
// and clear the mutability bit so we copy-on-write before any local mutation.
errors_ = other.errors_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureErrorsIsMutable();
errors_.addAll(other.errors_);
}
onChanged();
}
} else {
if (!other.errors_.isEmpty()) {
if (errorsBuilder_.isEmpty()) {
// The field builder holds nothing: discard it and fall back to plain-list
// sharing, re-creating the builder only if the runtime forces field builders.
errorsBuilder_.dispose();
errorsBuilder_ = null;
errors_ = other.errors_;
bitField0_ = (bitField0_ & ~0x00000001);
errorsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getErrorsFieldBuilder() : null;
} else {
errorsBuilder_.addAllMessages(other.errors_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Proto3 message with no required fields, so any instance is initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streaming wire-format parse-and-merge. Tag values encode (field_number << 3) | wire_type:
// 10 = field 1 (`errors`, length-delimited), 18 = field 2 (`next_page_token`).
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
com.google.rpc.Status m =
input.readMessage(
com.google.rpc.Status.parser(),
extensionRegistry);
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(m);
} else {
errorsBuilder_.addMessage(m);
}
break;
} // case 10
case 18: {
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// The runtime may wrap an IOException from the underlying stream; unwrap and rethrow it.
throw e.unwrapIOException();
} finally {
// Notify parents even on failure: fields consumed before the error remain set.
onChanged();
} // finally
return this;
}
// Presence/mutability bits: 0x00000001 = local errors_ list is privately owned and
// mutable; 0x00000002 = next_page_token has been explicitly set.
private int bitField0_;
// Backing list for `errors` when no field builder is active. May alias an immutable
// list from another message until ensureErrorsIsMutable() copies it.
private java.util.List<com.google.rpc.Status> errors_ =
java.util.Collections.emptyList();
// Copy-on-write guard: clones errors_ into a private ArrayList the first time a
// mutation is requested, then marks it owned via bit 0x00000001.
private void ensureErrorsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
errors_ = new java.util.ArrayList<com.google.rpc.Status>(errors_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created nested-builder support for `errors`; once non-null it, not errors_,
// is the source of truth (errors_ is nulled out in getErrorsFieldBuilder()).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> errorsBuilder_;
// ---------------------------------------------------------------------------
// Accessors for the repeated `errors` field. Every method follows the same
// two-mode pattern: operate on the plain errors_ list while errorsBuilder_ is
// null, otherwise delegate to the RepeatedFieldBuilderV3.
// ---------------------------------------------------------------------------
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<com.google.rpc.Status> getErrorsList() {
if (errorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(errors_);
} else {
return errorsBuilder_.getMessageList();
}
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public int getErrorsCount() {
if (errorsBuilder_ == null) {
return errors_.size();
} else {
return errorsBuilder_.getCount();
}
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status getErrors(int index) {
if (errorsBuilder_ == null) {
return errors_.get(index);
} else {
return errorsBuilder_.getMessage(index);
}
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder setErrors(
int index, com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.set(index, value);
onChanged();
} else {
errorsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder setErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.set(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(value);
onChanged();
} else {
errorsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(index, value);
onChanged();
} else {
errorsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addErrors(
int index, com.google.rpc.Status.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder addAllErrors(
java.lang.Iterable<? extends com.google.rpc.Status> values) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, errors_);
onChanged();
} else {
errorsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder clearErrors() {
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
errorsBuilder_.clear();
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public Builder removeErrors(int index) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.remove(index);
onChanged();
} else {
errorsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder getErrorsBuilder(
int index) {
// Note: forces creation of the field builder, migrating state out of errors_.
return getErrorsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.StatusOrBuilder getErrorsOrBuilder(
int index) {
if (errorsBuilder_ == null) {
return errors_.get(index); } else {
return errorsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<? extends com.google.rpc.StatusOrBuilder>
getErrorsOrBuilderList() {
if (errorsBuilder_ != null) {
return errorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(errors_);
}
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder() {
return getErrorsFieldBuilder().addBuilder(
com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public com.google.rpc.Status.Builder addErrorsBuilder(
int index) {
return getErrorsFieldBuilder().addBuilder(
index, com.google.rpc.Status.getDefaultInstance());
}
/**
* <pre>
* Details of the errors when performing the asynchronous operation.
* </pre>
*
* <code>repeated .google.rpc.Status errors = 1;</code>
*/
public java.util.List<com.google.rpc.Status.Builder>
getErrorsBuilderList() {
return getErrorsFieldBuilder().getBuilderList();
}
// Lazily constructs the field builder from the current errors_ list, then nulls
// errors_ so the builder becomes the single source of truth from this point on.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
getErrorsFieldBuilder() {
if (errorsBuilder_ == null) {
errorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
errors_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
errors_ = null;
}
return errorsBuilder_;
}
// Stored as Object so it can hold either a String or a ByteString; the getters
// below lazily convert and cache in whichever representation was requested.
private java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
// Cache the encoded ByteString for the same reason as above.
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
// Proto3 strings must be valid UTF-8; reject malformed bytes up front.
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Return-type-narrowing delegations for unknown-field handling; no added behavior.
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse)
// Singleton default instance (all fields unset: empty errors list, empty token),
// created eagerly when the class is initialized.
private static final com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse();
}
public static com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser: parses via a fresh Builder and, on any failure, attaches the
// partially-built message to the thrown InvalidProtocolBufferException so callers
// can inspect whatever fields were successfully read.
private static final com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse>
PARSER = new com.google.protobuf.AbstractParser<ListCampaignDraftAsyncErrorsResponse>() {
@java.lang.Override
public ListCampaignDraftAsyncErrorsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in the protobuf exception type, preserving the cause.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListCampaignDraftAsyncErrorsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListCampaignDraftAsyncErrorsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ---------------------------------------------------------------------------
// NOTE(review): the two lines originally here were a corpus/index row, not Java:
//   "| googleapis/google-cloud-java | 36,202 |
//    java-gkehub/proto-google-cloud-gkehub-v1alpha/src/main/java/com/google/cloud/gkehub/v1alpha/ListFeaturesResponse.java | /*"
// This file is a concatenation artifact: the generated Google Ads class above and
// the generated GKE Hub ListFeaturesResponse class below are two separate source
// files and should be split apart. The license comment of the second file begins
// with the "/*" retained on the next line.
// ---------------------------------------------------------------------------
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1alpha/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.v1alpha;
/**
*
*
* <pre>
* Response message for the `GkeHub.ListFeatures` method.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1alpha.ListFeaturesResponse}
*/
public final class ListFeaturesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.v1alpha.ListFeaturesResponse)
ListFeaturesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListFeaturesResponse.newBuilder() to construct.
private ListFeaturesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListFeaturesResponse() {
resources_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListFeaturesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1alpha.ServiceProto
.internal_static_google_cloud_gkehub_v1alpha_ListFeaturesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1alpha.ServiceProto
.internal_static_google_cloud_gkehub_v1alpha_ListFeaturesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse.class,
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse.Builder.class);
}
public static final int RESOURCES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.gkehub.v1alpha.Feature> resources_;
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.gkehub.v1alpha.Feature> getResourcesList() {
return resources_;
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.gkehub.v1alpha.FeatureOrBuilder>
getResourcesOrBuilderList() {
return resources_;
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
@java.lang.Override
public int getResourcesCount() {
return resources_.size();
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
@java.lang.Override
public com.google.cloud.gkehub.v1alpha.Feature getResources(int index) {
return resources_.get(index);
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
@java.lang.Override
public com.google.cloud.gkehub.v1alpha.FeatureOrBuilder getResourcesOrBuilder(int index) {
return resources_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to request the next page of resources from the
* `ListFeatures` method. The value of an empty string means
* that there are no more resources to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to request the next page of resources from the
* `ListFeatures` method. The value of an empty string means
* that there are no more resources to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < resources_.size(); i++) {
output.writeMessage(1, resources_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < resources_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, resources_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.gkehub.v1alpha.ListFeaturesResponse)) {
return super.equals(obj);
}
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse other =
(com.google.cloud.gkehub.v1alpha.ListFeaturesResponse) obj;
if (!getResourcesList().equals(other.getResourcesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getResourcesCount() > 0) {
hash = (37 * hash) + RESOURCES_FIELD_NUMBER;
hash = (53 * hash) + getResourcesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.gkehub.v1alpha.ListFeaturesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the `GkeHub.ListFeatures` method.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.v1alpha.ListFeaturesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.v1alpha.ListFeaturesResponse)
com.google.cloud.gkehub.v1alpha.ListFeaturesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.v1alpha.ServiceProto
.internal_static_google_cloud_gkehub_v1alpha_ListFeaturesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.v1alpha.ServiceProto
.internal_static_google_cloud_gkehub_v1alpha_ListFeaturesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse.class,
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse.Builder.class);
}
// Construct using com.google.cloud.gkehub.v1alpha.ListFeaturesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (resourcesBuilder_ == null) {
resources_ = java.util.Collections.emptyList();
} else {
resources_ = null;
resourcesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.v1alpha.ServiceProto
.internal_static_google_cloud_gkehub_v1alpha_ListFeaturesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.v1alpha.ListFeaturesResponse getDefaultInstanceForType() {
return com.google.cloud.gkehub.v1alpha.ListFeaturesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.v1alpha.ListFeaturesResponse build() {
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.v1alpha.ListFeaturesResponse buildPartial() {
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse result =
new com.google.cloud.gkehub.v1alpha.ListFeaturesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.gkehub.v1alpha.ListFeaturesResponse result) {
if (resourcesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
resources_ = java.util.Collections.unmodifiableList(resources_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.resources_ = resources_;
} else {
result.resources_ = resourcesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.gkehub.v1alpha.ListFeaturesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.v1alpha.ListFeaturesResponse) {
return mergeFrom((com.google.cloud.gkehub.v1alpha.ListFeaturesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.gkehub.v1alpha.ListFeaturesResponse other) {
if (other == com.google.cloud.gkehub.v1alpha.ListFeaturesResponse.getDefaultInstance())
return this;
if (resourcesBuilder_ == null) {
if (!other.resources_.isEmpty()) {
if (resources_.isEmpty()) {
resources_ = other.resources_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResourcesIsMutable();
resources_.addAll(other.resources_);
}
onChanged();
}
} else {
if (!other.resources_.isEmpty()) {
if (resourcesBuilder_.isEmpty()) {
resourcesBuilder_.dispose();
resourcesBuilder_ = null;
resources_ = other.resources_;
bitField0_ = (bitField0_ & ~0x00000001);
resourcesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getResourcesFieldBuilder()
: null;
} else {
resourcesBuilder_.addAllMessages(other.resources_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
    // This message declares no required fields, so any instance is initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    /**
     * Reads fields from {@code input} and merges them into this builder. Unknown
     * fields are preserved via the base class; malformed input surfaces as the
     * unwrapped {@link java.io.IOException}.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0: // tag 0 marks end of stream
              done = true;
              break;
            case 10: // field 1 (resources), wire type 2: one Feature message
              {
                com.google.cloud.gkehub.v1alpha.Feature m =
                    input.readMessage(
                        com.google.cloud.gkehub.v1alpha.Feature.parser(), extensionRegistry);
                if (resourcesBuilder_ == null) {
                  ensureResourcesIsMutable();
                  resources_.add(m);
                } else {
                  resourcesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18: // field 2 (next_page_token), wire type 2: UTF-8 string
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence/ownership bits: 0x00000001 = resources_ is a private mutable copy,
    // 0x00000002 = next_page_token has been set on this builder.
    private int bitField0_;
    private java.util.List<com.google.cloud.gkehub.v1alpha.Feature> resources_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: swaps the (possibly shared or immutable) list for a
    // private ArrayList before the first in-place mutation.
    private void ensureResourcesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        resources_ = new java.util.ArrayList<com.google.cloud.gkehub.v1alpha.Feature>(resources_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created by getResourcesFieldBuilder(); while null, the plain
    // resources_ list above is authoritative.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.gkehub.v1alpha.Feature,
            com.google.cloud.gkehub.v1alpha.Feature.Builder,
            com.google.cloud.gkehub.v1alpha.FeatureOrBuilder>
        resourcesBuilder_;
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
public java.util.List<com.google.cloud.gkehub.v1alpha.Feature> getResourcesList() {
if (resourcesBuilder_ == null) {
return java.util.Collections.unmodifiableList(resources_);
} else {
return resourcesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
public int getResourcesCount() {
if (resourcesBuilder_ == null) {
return resources_.size();
} else {
return resourcesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
public com.google.cloud.gkehub.v1alpha.Feature getResources(int index) {
if (resourcesBuilder_ == null) {
return resources_.get(index);
} else {
return resourcesBuilder_.getMessage(index);
}
}
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder setResources(int index, com.google.cloud.gkehub.v1alpha.Feature value) {
      if (resourcesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResourcesIsMutable();
        resources_.set(index, value);
        onChanged(); // notify parent builders of the modification
      } else {
        resourcesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder setResources(
        int index, com.google.cloud.gkehub.v1alpha.Feature.Builder builderForValue) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.set(index, builderForValue.build());
        onChanged();
      } else {
        resourcesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder addResources(com.google.cloud.gkehub.v1alpha.Feature value) {
      if (resourcesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResourcesIsMutable();
        resources_.add(value);
        onChanged(); // notify parent builders of the modification
      } else {
        resourcesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder addResources(int index, com.google.cloud.gkehub.v1alpha.Feature value) {
      if (resourcesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureResourcesIsMutable();
        resources_.add(index, value);
        onChanged();
      } else {
        resourcesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder addResources(com.google.cloud.gkehub.v1alpha.Feature.Builder builderForValue) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.add(builderForValue.build());
        onChanged();
      } else {
        resourcesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder addResources(
        int index, com.google.cloud.gkehub.v1alpha.Feature.Builder builderForValue) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.add(index, builderForValue.build());
        onChanged();
      } else {
        resourcesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder addAllResources(
        java.lang.Iterable<? extends com.google.cloud.gkehub.v1alpha.Feature> values) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, resources_);
        onChanged();
      } else {
        resourcesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder clearResources() {
      if (resourcesBuilder_ == null) {
        resources_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001); // list is immutable again
        onChanged();
      } else {
        resourcesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public Builder removeResources(int index) {
      if (resourcesBuilder_ == null) {
        ensureResourcesIsMutable();
        resources_.remove(index);
        onChanged();
      } else {
        resourcesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public com.google.cloud.gkehub.v1alpha.Feature.Builder getResourcesBuilder(int index) {
      // Switches this field into field-builder mode (see getResourcesFieldBuilder).
      return getResourcesFieldBuilder().getBuilder(index);
    }
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
public com.google.cloud.gkehub.v1alpha.FeatureOrBuilder getResourcesOrBuilder(int index) {
if (resourcesBuilder_ == null) {
return resources_.get(index);
} else {
return resourcesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of matching Features
* </pre>
*
* <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
*/
public java.util.List<? extends com.google.cloud.gkehub.v1alpha.FeatureOrBuilder>
getResourcesOrBuilderList() {
if (resourcesBuilder_ != null) {
return resourcesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(resources_);
}
}
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public com.google.cloud.gkehub.v1alpha.Feature.Builder addResourcesBuilder() {
      // Appends a new default-valued element and returns its builder.
      return getResourcesFieldBuilder()
          .addBuilder(com.google.cloud.gkehub.v1alpha.Feature.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public com.google.cloud.gkehub.v1alpha.Feature.Builder addResourcesBuilder(int index) {
      // Inserts a new default-valued element at the given index.
      return getResourcesFieldBuilder()
          .addBuilder(index, com.google.cloud.gkehub.v1alpha.Feature.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of matching Features
     * </pre>
     *
     * <code>repeated .google.cloud.gkehub.v1alpha.Feature resources = 1;</code>
     */
    public java.util.List<com.google.cloud.gkehub.v1alpha.Feature.Builder>
        getResourcesBuilderList() {
      return getResourcesFieldBuilder().getBuilderList();
    }
    // Lazily instantiates the RepeatedFieldBuilderV3 on first use, handing
    // resources_ over to it; the local reference is then nulled so all further
    // access flows through the builder.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.gkehub.v1alpha.Feature,
            com.google.cloud.gkehub.v1alpha.Feature.Builder,
            com.google.cloud.gkehub.v1alpha.FeatureOrBuilder>
        getResourcesFieldBuilder() {
      if (resourcesBuilder_ == null) {
        resourcesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.gkehub.v1alpha.Feature,
                com.google.cloud.gkehub.v1alpha.Feature.Builder,
                com.google.cloud.gkehub.v1alpha.FeatureOrBuilder>(
                resources_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        resources_ = null;
      }
      return resourcesBuilder_;
    }
    // Holds either a String or a ByteString; whichever representation was set or
    // decoded last is cached here, and the getters convert lazily on demand.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s; // cache the decoded form for subsequent calls
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b; // cache the encoded form for subsequent calls
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002; // mark the field as set
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002); // clear the set bit
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to request the next page of resources from the
     * `ListFeatures` method. The value of an empty string means
     * that there are no more resources to return.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value); // proto3 strings must be valid UTF-8
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field bookkeeping is delegated to the generated-message base class.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.v1alpha.ListFeaturesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.v1alpha.ListFeaturesResponse)
  // Singleton default (all-fields-unset) instance, created eagerly at class load.
  private static final com.google.cloud.gkehub.v1alpha.ListFeaturesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.v1alpha.ListFeaturesResponse();
  }
  public static com.google.cloud.gkehub.v1alpha.ListFeaturesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser singleton; parse failures are reported as
  // InvalidProtocolBufferException carrying the partially built message.
  private static final com.google.protobuf.Parser<ListFeaturesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListFeaturesResponse>() {
        @java.lang.Override
        public ListFeaturesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap I/O failures so callers see a single protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared stateless parser for {@code ListFeaturesResponse}. */
  public static com.google.protobuf.Parser<ListFeaturesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListFeaturesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.gkehub.v1alpha.ListFeaturesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,415 | java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/DataQualityProto.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/data_quality.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
public final class DataQualityProto {
  // Non-instantiable holder for descriptors of data_quality.proto.
  private DataQualityProto() {}
  // No extensions are defined in this file, so the Lite overload is a no-op;
  // the full-registry overload simply narrows to it.
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }
  // Descriptor and field-accessor-table pairs for each message type declared in
  // google/cloud/dataplex/v1/data_quality.proto; assigned by the static
  // initializer at the bottom of this class.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_BigQueryExport_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_BigQueryExport_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_Recipients_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_Recipients_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_ScoreThresholdTrigger_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_ScoreThresholdTrigger_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobFailureTrigger_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobFailureTrigger_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobEndTrigger_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobEndTrigger_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_NotificationReport_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_NotificationReport_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityResult_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityResult_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_BigQueryExportResult_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_BigQueryExportResult_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRuleResult_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRuleResult_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityDimensionResult_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityDimensionResult_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityDimension_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityDimension_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_RangeExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_RangeExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_NonNullExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_NonNullExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_SetExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_SetExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_RegexExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_RegexExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_UniquenessExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_UniquenessExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_StatisticRangeExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_StatisticRangeExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_RowConditionExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_RowConditionExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_TableConditionExpectation_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_TableConditionExpectation_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityRule_SqlAssertion_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityRule_SqlAssertion_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_cloud_dataplex_v1_DataQualityColumnResult_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_cloud_dataplex_v1_DataQualityColumnResult_fieldAccessorTable;
  /** Returns the file descriptor for {@code google/cloud/dataplex/v1/data_quality.proto}. */
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }
  // Assigned exactly once by the static initializer below.
  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
// Standard protoc-generated wiring: (1) decode the serialized FileDescriptorProto for
// data_quality.proto, (2) resolve it against its four dependency files, (3) bind every
// message/nested-message descriptor and its reflective field-accessor table, in the same
// order the types are declared in the .proto file, and (4) re-interpret custom options
// (field_behavior / resource annotations) via an extension registry. The escaped string
// data below is generator output and must not be edited by hand.
static {
// Serialized google.protobuf.FileDescriptorProto, split into chunks to stay under the
// Java constant-pool string-length limit.
java.lang.String[] descriptorData = {
"\n"
+ "+google/cloud/dataplex/v1/data_quality."
+ "proto\022\030google.cloud.dataplex.v1\032\037google/"
+ "api/field_behavior.proto\032\031google/api/res"
+ "ource.proto\032/google/cloud/dataplex/v1/da"
+ "tascans_common.proto\032)google/cloud/dataplex/v1/processing.proto\"\351\010\n"
+ "\017DataQualitySpec\022=\n"
+ "\005rules\030\001"
+ " \003(\0132).google.cloud.dataplex.v1.DataQualityRuleB\003\340A\002\022\035\n"
+ "\020sampling_percent\030\004 \001(\002B\003\340A\001\022\027\n\n"
+ "row_filter\030\005 \001(\tB\003\340A\001\022Y\n"
+ "\021post_scan_actions\030\006 \001(\01329.google.c"
+ "loud.dataplex.v1.DataQualitySpec.PostScanActionsB\003\340A\001\022\'\n"
+ "\032catalog_publishing_enabled\030\010 \001(\010B\003\340A\001\032\332\006\n"
+ "\017PostScanActions\022f\n"
+ "\017bigquery_export\030\001 \001(\0132H.google.cloud.datap"
+ "lex.v1.DataQualitySpec.PostScanActions.BigQueryExportB\003\340A\001\022n\n"
+ "\023notification_report\030\002 \001(\0132L.google.cloud.dataplex.v1.DataQ"
+ "ualitySpec.PostScanActions.NotificationReportB\003\340A\001\032,\n"
+ "\016BigQueryExport\022\032\n\r"
+ "results_table\030\001 \001(\tB\003\340A\001\032!\n\n"
+ "Recipients\022\023\n"
+ "\006emails\030\001 \003(\tB\003\340A\001\0325\n"
+ "\025ScoreThresholdTrigger\022\034\n"
+ "\017score_threshold\030\002 \001(\002B\003\340A\001\032\023\n"
+ "\021JobFailureTrigger\032\017\n\r"
+ "JobEndTrigger\032\300\003\n"
+ "\022NotificationReport\022]\n\n"
+ "recipients\030\001 \001(\0132D.google.clo"
+ "ud.dataplex.v1.DataQualitySpec.PostScanActions.RecipientsB\003\340A\002\022u\n"
+ "\027score_threshold_trigger\030\002 \001(\0132O.google.cloud.dataplex."
+ "v1.DataQualitySpec.PostScanActions.ScoreThresholdTriggerB\003\340A\001\022m\n"
+ "\023job_failure_trigger\030\004 \001(\0132K.google.cloud.dataplex.v1.Da"
+ "taQualitySpec.PostScanActions.JobFailureTriggerB\003\340A\001\022e\n"
+ "\017job_end_trigger\030\005 \001(\0132G."
+ "google.cloud.dataplex.v1.DataQualitySpec.PostScanActions.JobEndTriggerB\003\340A\001\"\317\007\n"
+ "\021DataQualityResult\022\023\n"
+ "\006passed\030\005 \001(\010B\003\340A\003\022\027\n"
+ "\005score\030\t \001(\002B\003\340A\003H\000\210\001\001\022M\n\n"
+ "dimensions\030\002 "
+ "\003(\01324.google.cloud.dataplex.v1.DataQualityDimensionResultB\003\340A\003\022G\n"
+ "\007columns\030\n"
+ " \003(\01321.google.cloud.dataplex.v1.DataQualityColumnResultB\003\340A\003\022C\n"
+ "\005rules\030\003 \003(\0132/.google."
+ "cloud.dataplex.v1.DataQualityRuleResultB\003\340A\003\022\026\n"
+ "\trow_count\030\004 \001(\003B\003\340A\003\022@\n"
+ "\014scanned_data\030\007"
+ " \001(\0132%.google.cloud.dataplex.v1.ScannedDataB\003\340A\003\022h\n"
+ "\030post_scan_actions_result\030\010 \001(\0132A.google.cloud.dataplex.v1.Data"
+ "QualityResult.PostScanActionsResultB\003\340A\003\022a\n"
+ "\031catalog_publishing_status\030\013 \001(\01329.go"
+ "ogle.cloud.dataplex.v1.DataScanCatalogPublishingStatusB\003\340A\003\032\375\002\n"
+ "\025PostScanActionsResult\022{\n"
+ "\026bigquery_export_result\030\001 \001(\0132V."
+ "google.cloud.dataplex.v1.DataQualityResu"
+ "lt.PostScanActionsResult.BigQueryExportResultB\003\340A\003\032\346\001\n"
+ "\024BigQueryExportResult\022p\n"
+ "\005state\030\001 \001(\0162\\.google.cloud.dataplex.v1.Da"
+ "taQualityResult.PostScanActionsResult.BigQueryExportResult.StateB\003\340A\003\022\024\n"
+ "\007message\030\002 \001(\tB\003\340A\003\"F\n"
+ "\005State\022\025\n"
+ "\021STATE_UNSPECIFIED\020\000\022\r\n"
+ "\tSUCCEEDED\020\001\022\n\n"
+ "\006FAILED\020\002\022\013\n"
+ "\007SKIPPED\020\003B\010\n"
+ "\006_score\"\230\002\n"
+ "\025DataQualityRuleResult\022<\n"
+ "\004rule\030\001"
+ " \001(\0132).google.cloud.dataplex.v1.DataQualityRuleB\003\340A\003\022\023\n"
+ "\006passed\030\007 \001(\010B\003\340A\003\022\034\n"
+ "\017evaluated_count\030\t \001(\003B\003\340A\003\022\031\n"
+ "\014passed_count\030\010 \001(\003B\003\340A\003\022\027\n\n"
+ "null_count\030\005 \001(\003B\003\340A\003\022\027\n\n"
+ "pass_ratio\030\006 \001(\001B\003\340A\003\022\037\n"
+ "\022failing_rows_query\030\n"
+ " \001(\tB\003\340A\003\022 \n"
+ "\023assertion_row_count\030\013 \001(\003B\003\340A\003\"\234\001\n"
+ "\032DataQualityDimensionResult\022F\n"
+ "\tdimension\030\001 \001(\0132..google.clou"
+ "d.dataplex.v1.DataQualityDimensionB\003\340A\003\022\023\n"
+ "\006passed\030\003 \001(\010B\003\340A\003\022\027\n"
+ "\005score\030\004 \001(\002B\003\340A\003H\000\210\001\001B\010\n"
+ "\006_score\")\n"
+ "\024DataQualityDimension\022\021\n"
+ "\004name\030\001 \001(\tB\003\340A\003\"\215\016\n"
+ "\017DataQualityRule\022W\n"
+ "\021range_expectation\030\001 \001(\0132:.google.cloud"
+ ".dataplex.v1.DataQualityRule.RangeExpectationH\000\022\\\n"
+ "\024non_null_expectation\030\002 \001(\0132<."
+ "google.cloud.dataplex.v1.DataQualityRule.NonNullExpectationH\000\022S\n"
+ "\017set_expectation\030\003"
+ " \001(\01328.google.cloud.dataplex.v1.DataQualityRule.SetExpectationH\000\022W\n"
+ "\021regex_expectation\030\004"
+ " \001(\0132:.google.cloud.dataplex.v1.DataQualityRule.RegexExpectationH\000\022a\n"
+ "\026uniqueness_expectation\030d \001(\0132?.google.clo"
+ "ud.dataplex.v1.DataQualityRule.UniquenessExpectationH\000\022j\n"
+ "\033statistic_range_expectation\030e \001(\0132C.google.cloud.dataplex.v1.D"
+ "ataQualityRule.StatisticRangeExpectationH\000\022g\n"
+ "\031row_condition_expectation\030\310\001 \001(\0132A"
+ ".google.cloud.dataplex.v1.DataQualityRule.RowConditionExpectationH\000\022k\n"
+ "\033table_condition_expectation\030\311\001 \001(\0132C.google.cloud"
+ ".dataplex.v1.DataQualityRule.TableConditionExpectationH\000\022P\n\r"
+ "sql_assertion\030\312\001 \001(\013"
+ "26.google.cloud.dataplex.v1.DataQualityRule.SqlAssertionH\000\022\024\n"
+ "\006column\030\364\003 \001(\tB\003\340A\001\022\031\n"
+ "\013ignore_null\030\365\003 \001(\010B\003\340A\001\022\027\n"
+ "\tdimension\030\366\003 \001(\tB\003\340A\002\022\027\n"
+ "\tthreshold\030\367\003 \001(\001B\003\340A\001\022\022\n"
+ "\004name\030\370\003 \001(\tB\003\340A\001\022\031\n"
+ "\013description\030\371\003 \001(\tB\003\340A\001\022\027\n"
+ "\tsuspended\030\372\003 \001(\010B\003\340A\001\032\204\001\n"
+ "\020RangeExpectation\022\026\n"
+ "\tmin_value\030\001 \001(\tB\003\340A\001\022\026\n"
+ "\tmax_value\030\002 \001(\tB\003\340A\001\022\037\n"
+ "\022strict_min_enabled\030\003 \001(\010B\003\340A\001\022\037\n"
+ "\022strict_max_enabled\030\004 \001(\010B\003\340A\001\032\024\n"
+ "\022NonNullExpectation\032%\n"
+ "\016SetExpectation\022\023\n"
+ "\006values\030\001 \003(\tB\003\340A\001\032&\n"
+ "\020RegexExpectation\022\022\n"
+ "\005regex\030\001 \001(\tB\003\340A\001\032\027\n"
+ "\025UniquenessExpectation\032\302\002\n"
+ "\031StatisticRangeExpectation\022k\n"
+ "\tstatistic\030\001 \001(\0162S.google.cloud.datap"
+ "lex.v1.DataQualityRule.StatisticRangeExpectation.ColumnStatisticB\003\340A\001\022\026\n"
+ "\tmin_value\030\002 \001(\tB\003\340A\001\022\026\n"
+ "\tmax_value\030\003 \001(\tB\003\340A\001\022\037\n"
+ "\022strict_min_enabled\030\004 \001(\010B\003\340A\001\022\037\n"
+ "\022strict_max_enabled\030\005 \001(\010B\003\340A\001\"F\n"
+ "\017ColumnStatistic\022\027\n"
+ "\023STATISTIC_UNDEFINED\020\000\022\010\n"
+ "\004MEAN\020\001\022\007\n"
+ "\003MIN\020\002\022\007\n"
+ "\003MAX\020\003\0326\n"
+ "\027RowConditionExpectation\022\033\n"
+ "\016sql_expression\030\001 \001(\tB\003\340A\001\0328\n"
+ "\031TableConditionExpectation\022\033\n"
+ "\016sql_expression\030\001 \001(\tB\003\340A\001\032*\n"
+ "\014SqlAssertion\022\032\n\r"
+ "sql_statement\030\001 \001(\tB\003\340A\001B\013\n"
+ "\trule_type\"\265\001\n"
+ "\027DataQualityColumnResult\022\023\n"
+ "\006column\030\001 \001(\tB\003\340A\003\022\027\n"
+ "\005score\030\002 \001(\002B\003\340A\003H\000\210\001\001\022\023\n"
+ "\006passed\030\003 \001(\010B\003\340A\003\022M\n\n"
+ "dimensions\030\004 \003(\01324.google.cloud.dat"
+ "aplex.v1.DataQualityDimensionResultB\003\340A\003B\010\n"
+ "\006_scoreB\304\001\n"
+ "\034com.google.cloud.dataplex.v1B\020DataQualityProtoP\001Z8cloud.google.co"
+ "m/go/dataplex/apiv1/dataplexpb;dataplexpb\352AU\n"
+ "\035bigquery.googleapis.com/Table\0224projects/{project}/datasets/{dataset}/table"
+ "s/{table}b\006proto3"
};
// Cross-link this file against its imported .proto dependencies; the dependency order
// here must match the import order encoded in descriptorData above.
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.dataplex.v1.DataScansCommonProto.getDescriptor(),
com.google.cloud.dataplex.v1.ProcessingProto.getDescriptor(),
});
// Bind each message's descriptor / accessor table. The getMessageTypes()/getNestedTypes()
// indices and the field-name arrays mirror declaration order in data_quality.proto, so
// they must stay in sync with the serialized data above.
internal_static_google_cloud_dataplex_v1_DataQualitySpec_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_descriptor,
new java.lang.String[] {
"Rules",
"SamplingPercent",
"RowFilter",
"PostScanActions",
"CatalogPublishingEnabled",
});
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualitySpec_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor,
new java.lang.String[] {
"BigqueryExport", "NotificationReport",
});
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_BigQueryExport_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_BigQueryExport_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_BigQueryExport_descriptor,
new java.lang.String[] {
"ResultsTable",
});
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_Recipients_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor
.getNestedTypes()
.get(1);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_Recipients_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_Recipients_descriptor,
new java.lang.String[] {
"Emails",
});
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_ScoreThresholdTrigger_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor
.getNestedTypes()
.get(2);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_ScoreThresholdTrigger_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_ScoreThresholdTrigger_descriptor,
new java.lang.String[] {
"ScoreThreshold",
});
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobFailureTrigger_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor
.getNestedTypes()
.get(3);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobFailureTrigger_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobFailureTrigger_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobEndTrigger_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor
.getNestedTypes()
.get(4);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobEndTrigger_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_JobEndTrigger_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_NotificationReport_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_descriptor
.getNestedTypes()
.get(5);
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_NotificationReport_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualitySpec_PostScanActions_NotificationReport_descriptor,
new java.lang.String[] {
"Recipients", "ScoreThresholdTrigger", "JobFailureTrigger", "JobEndTrigger",
});
internal_static_google_cloud_dataplex_v1_DataQualityResult_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_dataplex_v1_DataQualityResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityResult_descriptor,
new java.lang.String[] {
"Passed",
"Score",
"Dimensions",
"Columns",
"Rules",
"RowCount",
"ScannedData",
"PostScanActionsResult",
"CatalogPublishingStatus",
});
internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityResult_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_descriptor,
new java.lang.String[] {
"BigqueryExportResult",
});
internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_BigQueryExportResult_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_descriptor
.getNestedTypes()
.get(0);
internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_BigQueryExportResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityResult_PostScanActionsResult_BigQueryExportResult_descriptor,
new java.lang.String[] {
"State", "Message",
});
internal_static_google_cloud_dataplex_v1_DataQualityRuleResult_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_dataplex_v1_DataQualityRuleResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRuleResult_descriptor,
new java.lang.String[] {
"Rule",
"Passed",
"EvaluatedCount",
"PassedCount",
"NullCount",
"PassRatio",
"FailingRowsQuery",
"AssertionRowCount",
});
internal_static_google_cloud_dataplex_v1_DataQualityDimensionResult_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_dataplex_v1_DataQualityDimensionResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityDimensionResult_descriptor,
new java.lang.String[] {
"Dimension", "Passed", "Score",
});
internal_static_google_cloud_dataplex_v1_DataQualityDimension_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_dataplex_v1_DataQualityDimension_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityDimension_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_dataplex_v1_DataQualityRule_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor,
new java.lang.String[] {
"RangeExpectation",
"NonNullExpectation",
"SetExpectation",
"RegexExpectation",
"UniquenessExpectation",
"StatisticRangeExpectation",
"RowConditionExpectation",
"TableConditionExpectation",
"SqlAssertion",
"Column",
"IgnoreNull",
"Dimension",
"Threshold",
"Name",
"Description",
"Suspended",
"RuleType",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_RangeExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(0);
internal_static_google_cloud_dataplex_v1_DataQualityRule_RangeExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_RangeExpectation_descriptor,
new java.lang.String[] {
"MinValue", "MaxValue", "StrictMinEnabled", "StrictMaxEnabled",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_NonNullExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(1);
internal_static_google_cloud_dataplex_v1_DataQualityRule_NonNullExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_NonNullExpectation_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_dataplex_v1_DataQualityRule_SetExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(2);
internal_static_google_cloud_dataplex_v1_DataQualityRule_SetExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_SetExpectation_descriptor,
new java.lang.String[] {
"Values",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_RegexExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(3);
internal_static_google_cloud_dataplex_v1_DataQualityRule_RegexExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_RegexExpectation_descriptor,
new java.lang.String[] {
"Regex",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_UniquenessExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(4);
internal_static_google_cloud_dataplex_v1_DataQualityRule_UniquenessExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_UniquenessExpectation_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_dataplex_v1_DataQualityRule_StatisticRangeExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(5);
internal_static_google_cloud_dataplex_v1_DataQualityRule_StatisticRangeExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_StatisticRangeExpectation_descriptor,
new java.lang.String[] {
"Statistic", "MinValue", "MaxValue", "StrictMinEnabled", "StrictMaxEnabled",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_RowConditionExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(6);
internal_static_google_cloud_dataplex_v1_DataQualityRule_RowConditionExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_RowConditionExpectation_descriptor,
new java.lang.String[] {
"SqlExpression",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_TableConditionExpectation_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(7);
internal_static_google_cloud_dataplex_v1_DataQualityRule_TableConditionExpectation_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_TableConditionExpectation_descriptor,
new java.lang.String[] {
"SqlExpression",
});
internal_static_google_cloud_dataplex_v1_DataQualityRule_SqlAssertion_descriptor =
internal_static_google_cloud_dataplex_v1_DataQualityRule_descriptor.getNestedTypes().get(8);
internal_static_google_cloud_dataplex_v1_DataQualityRule_SqlAssertion_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityRule_SqlAssertion_descriptor,
new java.lang.String[] {
"SqlStatement",
});
internal_static_google_cloud_dataplex_v1_DataQualityColumnResult_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_dataplex_v1_DataQualityColumnResult_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_dataplex_v1_DataQualityColumnResult_descriptor,
new java.lang.String[] {
"Column", "Score", "Passed", "Dimensions",
});
// Re-parse the descriptor's unknown fields so the google.api custom options
// (field_behavior, resource definitions) become accessible via reflection.
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.ResourceProto.resourceDefinition);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
// Force static initialization of the dependency proto classes.
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.dataplex.v1.DataScansCommonProto.getDescriptor();
com.google.cloud.dataplex.v1.ProcessingProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
// ==== extraction artifact: boundary between two concatenated generated files ====
// (next file: googleapis/google-cloud-java —
//  java-asset/proto-google-cloud-asset-v1/src/main/java/com/google/cloud/asset/v1/UpdateSavedQueryRequest.java)
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1/asset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.asset.v1;
/**
*
*
* <pre>
* Request to update a saved query.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.UpdateSavedQueryRequest}
*/
public final class UpdateSavedQueryRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.asset.v1.UpdateSavedQueryRequest)
UpdateSavedQueryRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use UpdateSavedQueryRequest.newBuilder() to construct.
private UpdateSavedQueryRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default constructor: both message fields stay null/unset (bitField0_ == 0).
private UpdateSavedQueryRequest() {}

// Reflection hook used by the runtime to create fresh instances during parsing.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new UpdateSavedQueryRequest();
}

// Raw proto descriptor for this message type (from asset_service.proto).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.asset.v1.AssetServiceProto
      .internal_static_google_cloud_asset_v1_UpdateSavedQueryRequest_descriptor;
}

// Reflective field-accessor table binding the descriptor to this class and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.asset.v1.AssetServiceProto
      .internal_static_google_cloud_asset_v1_UpdateSavedQueryRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.asset.v1.UpdateSavedQueryRequest.class,
          com.google.cloud.asset.v1.UpdateSavedQueryRequest.Builder.class);
}

// Presence bits: 0x1 = saved_query set, 0x2 = update_mask set.
private int bitField0_;

public static final int SAVED_QUERY_FIELD_NUMBER = 1;
private com.google.cloud.asset.v1.SavedQuery savedQuery_;
/**
 * Reports whether the required {@code saved_query} message (field 1) was explicitly set
 * on this request. The saved query's {@code name} identifies the resource to update and
 * has one of the forms
 * {@code projects/.../savedQueries/...}, {@code folders/.../savedQueries/...} or
 * {@code organizations/.../savedQueries/...}.
 *
 * @return Whether the savedQuery field is set.
 */
@java.lang.Override
public boolean hasSavedQuery() {
  // Presence is tracked in bit 0x1 of bitField0_.
  return (bitField0_ & 0x00000001) != 0;
}
/**
 * Returns the saved query to apply in this update, or {@link
 * com.google.cloud.asset.v1.SavedQuery#getDefaultInstance()} when the field is unset
 * (check {@link #hasSavedQuery()} to distinguish the two).
 *
 * @return The savedQuery.
 */
@java.lang.Override
public com.google.cloud.asset.v1.SavedQuery getSavedQuery() {
  if (savedQuery_ != null) {
    return savedQuery_;
  }
  return com.google.cloud.asset.v1.SavedQuery.getDefaultInstance();
}
/**
 * Returns the {@code saved_query} field as its read-only {@code OrBuilder} view;
 * behaves exactly like {@link #getSavedQuery()} (default instance when unset).
 */
@java.lang.Override
public com.google.cloud.asset.v1.SavedQueryOrBuilder getSavedQueryOrBuilder() {
  if (savedQuery_ != null) {
    return savedQuery_;
  }
  return com.google.cloud.asset.v1.SavedQuery.getDefaultInstance();
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;

/**
 * Reports whether the required {@code update_mask} (field 2) — the list of saved-query
 * fields to modify — was explicitly set on this request.
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  // Presence is tracked in bit 0x2 of bitField0_.
  return (bitField0_ & 0x00000002) != 0;
}
/**
 * Returns the field mask naming which saved-query fields to update, or
 * {@link com.google.protobuf.FieldMask#getDefaultInstance()} when unset
 * (check {@link #hasUpdateMask()} to distinguish the two).
 *
 * @return The updateMask.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  if (updateMask_ != null) {
    return updateMask_;
  }
  return com.google.protobuf.FieldMask.getDefaultInstance();
}
/**
 * Returns the {@code update_mask} field as its read-only {@code OrBuilder} view;
 * behaves exactly like {@link #getUpdateMask()} (default instance when unset).
 */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMask_ != null) {
    return updateMask_;
  }
  return com.google.protobuf.FieldMask.getDefaultInstance();
}
// Memoized initialization state: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

/**
 * Always returns {@code true}: this proto3 message declares no required fields in the
 * proto2 sense, so every instance is structurally initialized. Caches the answer after
 * the first call.
 */
@java.lang.Override
public final boolean isInitialized() {
  switch (memoizedIsInitialized) {
    case 1:
      return true;
    case 0:
      return false;
    default:
      memoizedIsInitialized = 1;
      return true;
  }
}
/**
 * Serializes the set fields to {@code output} in ascending field-number order
 * (saved_query = 1, update_mask = 2), then appends any retained unknown fields.
 *
 * @throws java.io.IOException if the underlying stream fails
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (hasSavedQuery()) {
    output.writeMessage(SAVED_QUERY_FIELD_NUMBER, getSavedQuery());
  }
  if (hasUpdateMask()) {
    output.writeMessage(UPDATE_MASK_FIELD_NUMBER, getUpdateMask());
  }
  getUnknownFields().writeTo(output);
}
/**
 * Computes (and memoizes in {@code memoizedSize}) the wire size of this message:
 * the tagged size of each set field plus the unknown-field set.
 */
@java.lang.Override
public int getSerializedSize() {
  int cached = memoizedSize;
  if (cached != -1) {
    return cached;
  }
  int size = 0;
  if (hasSavedQuery()) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(
            SAVED_QUERY_FIELD_NUMBER, getSavedQuery());
  }
  if (hasUpdateMask()) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(
            UPDATE_MASK_FIELD_NUMBER, getUpdateMask());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Structural equality: two requests are equal when field presence matches, every set
 * field compares equal, and the unknown-field sets match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.asset.v1.UpdateSavedQueryRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.asset.v1.UpdateSavedQueryRequest that =
      (com.google.cloud.asset.v1.UpdateSavedQueryRequest) obj;
  if (hasSavedQuery() != that.hasSavedQuery()) {
    return false;
  }
  if (hasSavedQuery() && !getSavedQuery().equals(that.getSavedQuery())) {
    return false;
  }
  if (hasUpdateMask() != that.hasUpdateMask()) {
    return false;
  }
  if (hasUpdateMask() && !getUpdateMask().equals(that.getUpdateMask())) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
/**
 * Hash consistent with {@link #equals(Object)}; memoized in {@code memoizedHashCode}.
 * Uses the protoc accumulation scheme: seed with the descriptor hash, then fold in each
 * present field as (field number, value hash), and finally the unknown fields.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptor().hashCode();
  if (hasSavedQuery()) {
    h = (37 * h) + SAVED_QUERY_FIELD_NUMBER;
    h = (53 * h) + getSavedQuery().hashCode();
  }
  if (hasUpdateMask()) {
    h = (37 * h) + UPDATE_MASK_FIELD_NUMBER;
    h = (53 * h) + getUpdateMask().hashCode();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
// ---------------------------------------------------------------------------------------
// Standard protoc-generated parse entry points. All overloads delegate to the shared
// PARSER (directly, or via the GeneratedMessageV3 I/O helpers) and differ only in input
// source; the "Delimited" variants first read a varint length prefix from the stream.
// Each throws InvalidProtocolBufferException on malformed input.
// ---------------------------------------------------------------------------------------
public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.asset.v1.UpdateSavedQueryRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
  // NOTE(review): protoc-generated builder factories — do not edit by hand.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a fresh Builder by copying from the immutable default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.cloud.asset.v1.UpdateSavedQueryRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid the mergeFrom pass when this is the default instance (nothing to copy).
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  // NOTE(review): protoc-generated builder for UpdateSavedQueryRequest — do not edit by hand.
  // Field presence is tracked in bitField0_: bit 0x1 = saved_query, bit 0x2 = update_mask.
  /**
   *
   *
   * <pre>
   * Request to update a saved query.
   * </pre>
   *
   * Protobuf type {@code google.cloud.asset.v1.UpdateSavedQueryRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.asset.v1.UpdateSavedQueryRequest)
      com.google.cloud.asset.v1.UpdateSavedQueryRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.asset.v1.AssetServiceProto
          .internal_static_google_cloud_asset_v1_UpdateSavedQueryRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.asset.v1.AssetServiceProto
          .internal_static_google_cloud_asset_v1_UpdateSavedQueryRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.asset.v1.UpdateSavedQueryRequest.class,
              com.google.cloud.asset.v1.UpdateSavedQueryRequest.Builder.class);
    }

    // Construct using com.google.cloud.asset.v1.UpdateSavedQueryRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly constructs the nested field builders when the protobuf runtime requests it
    // (alwaysUseFieldBuilders is set by the runtime, e.g. for nested-builder support).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getSavedQueryFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    // Resets both message fields, clears all presence bits, and disposes cached nested builders.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      savedQuery_ = null;
      if (savedQueryBuilder_ != null) {
        savedQueryBuilder_.dispose();
        savedQueryBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.asset.v1.AssetServiceProto
          .internal_static_google_cloud_asset_v1_UpdateSavedQueryRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.asset.v1.UpdateSavedQueryRequest getDefaultInstanceForType() {
      return com.google.cloud.asset.v1.UpdateSavedQueryRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.asset.v1.UpdateSavedQueryRequest build() {
      com.google.cloud.asset.v1.UpdateSavedQueryRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.asset.v1.UpdateSavedQueryRequest buildPartial() {
      com.google.cloud.asset.v1.UpdateSavedQueryRequest result =
          new com.google.cloud.asset.v1.UpdateSavedQueryRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies into `result` only the fields whose presence bits are set, preferring the nested
    // builder's state over the raw field when a builder exists.
    private void buildPartial0(com.google.cloud.asset.v1.UpdateSavedQueryRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.savedQuery_ = savedQueryBuilder_ == null ? savedQuery_ : savedQueryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the strongly-typed mergeFrom when `other` is the same message type.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.asset.v1.UpdateSavedQueryRequest) {
        return mergeFrom((com.google.cloud.asset.v1.UpdateSavedQueryRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.asset.v1.UpdateSavedQueryRequest other) {
      if (other == com.google.cloud.asset.v1.UpdateSavedQueryRequest.getDefaultInstance())
        return this;
      if (other.hasSavedQuery()) {
        mergeSavedQuery(other.getSavedQuery());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: tag 10 = saved_query (field 1), tag 18 = update_mask (field 2);
    // unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getSavedQueryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.cloud.asset.v1.SavedQuery savedQuery_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.asset.v1.SavedQuery,
            com.google.cloud.asset.v1.SavedQuery.Builder,
            com.google.cloud.asset.v1.SavedQueryOrBuilder>
        savedQueryBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the savedQuery field is set.
     */
    public boolean hasSavedQuery() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The savedQuery.
     */
    public com.google.cloud.asset.v1.SavedQuery getSavedQuery() {
      if (savedQueryBuilder_ == null) {
        return savedQuery_ == null
            ? com.google.cloud.asset.v1.SavedQuery.getDefaultInstance()
            : savedQuery_;
      } else {
        return savedQueryBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setSavedQuery(com.google.cloud.asset.v1.SavedQuery value) {
      if (savedQueryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        savedQuery_ = value;
      } else {
        savedQueryBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setSavedQuery(com.google.cloud.asset.v1.SavedQuery.Builder builderForValue) {
      if (savedQueryBuilder_ == null) {
        savedQuery_ = builderForValue.build();
      } else {
        savedQueryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeSavedQuery(com.google.cloud.asset.v1.SavedQuery value) {
      if (savedQueryBuilder_ == null) {
        // Merge into the existing message only if one is already set and non-default;
        // otherwise replace outright.
        if (((bitField0_ & 0x00000001) != 0)
            && savedQuery_ != null
            && savedQuery_ != com.google.cloud.asset.v1.SavedQuery.getDefaultInstance()) {
          getSavedQueryBuilder().mergeFrom(value);
        } else {
          savedQuery_ = value;
        }
      } else {
        savedQueryBuilder_.mergeFrom(value);
      }
      if (savedQuery_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearSavedQuery() {
      bitField0_ = (bitField0_ & ~0x00000001);
      savedQuery_ = null;
      if (savedQueryBuilder_ != null) {
        savedQueryBuilder_.dispose();
        savedQueryBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.asset.v1.SavedQuery.Builder getSavedQueryBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getSavedQueryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.asset.v1.SavedQueryOrBuilder getSavedQueryOrBuilder() {
      if (savedQueryBuilder_ != null) {
        return savedQueryBuilder_.getMessageOrBuilder();
      } else {
        return savedQuery_ == null
            ? com.google.cloud.asset.v1.SavedQuery.getDefaultInstance()
            : savedQuery_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The saved query to update.
     *
     * The saved query's `name` field is used to identify the one to update,
     * which has format as below:
     *
     * * projects/project_number/savedQueries/saved_query_id
     * * folders/folder_number/savedQueries/saved_query_id
     * * organizations/organization_number/savedQueries/saved_query_id
     * </pre>
     *
     * <code>
     * .google.cloud.asset.v1.SavedQuery saved_query = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    // Lazily creates the nested builder; once created, savedQuery_ is nulled and the builder
    // becomes the single source of truth for the field.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.asset.v1.SavedQuery,
            com.google.cloud.asset.v1.SavedQuery.Builder,
            com.google.cloud.asset.v1.SavedQueryOrBuilder>
        getSavedQueryFieldBuilder() {
      if (savedQueryBuilder_ == null) {
        savedQueryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.asset.v1.SavedQuery,
                com.google.cloud.asset.v1.SavedQuery.Builder,
                com.google.cloud.asset.v1.SavedQueryOrBuilder>(
                getSavedQuery(), getParentForChildren(), isClean());
        savedQuery_ = null;
      }
      return savedQueryBuilder_;
    }

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Same merge-or-replace logic as mergeSavedQuery, for the update_mask field.
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The list of fields to update.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    // Lazily creates the nested builder; once created, updateMask_ is nulled and the builder
    // owns the field state.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.asset.v1.UpdateSavedQueryRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.asset.v1.UpdateSavedQueryRequest)
  // NOTE(review): protoc-generated singleton default instance and parser — do not edit by hand.
  private static final com.google.cloud.asset.v1.UpdateSavedQueryRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.asset.v1.UpdateSavedQueryRequest();
  }

  public static com.google.cloud.asset.v1.UpdateSavedQueryRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by all parseFrom overloads; on failure it attaches the partially-parsed
  // message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<UpdateSavedQueryRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateSavedQueryRequest>() {
        @java.lang.Override
        public UpdateSavedQueryRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateSavedQueryRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateSavedQueryRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.asset.v1.UpdateSavedQueryRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,398 | java-dialogflow-cx/grpc-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/ExamplesGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for managing [Examples][google.cloud.dialogflow.cx.v3beta1.Example].
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/dialogflow/cx/v3beta1/example.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ExamplesGrpc {
  // NOTE(review): gRPC-generated holder class — static members only; private ctor prevents
  // instantiation. Do not edit by hand; regenerate from example.proto.
  private ExamplesGrpc() {}

  // Fully-qualified service name, used to build each RPC's full method name.
  public static final java.lang.String SERVICE_NAME = "google.cloud.dialogflow.cx.v3beta1.Examples";
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getCreateExampleMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "CreateExample",
requestType = com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest.class,
responseType = com.google.cloud.dialogflow.cx.v3beta1.Example.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getCreateExampleMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getCreateExampleMethod;
if ((getCreateExampleMethod = ExamplesGrpc.getCreateExampleMethod) == null) {
synchronized (ExamplesGrpc.class) {
if ((getCreateExampleMethod = ExamplesGrpc.getCreateExampleMethod) == null) {
ExamplesGrpc.getCreateExampleMethod =
getCreateExampleMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateExample"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()))
.setSchemaDescriptor(new ExamplesMethodDescriptorSupplier("CreateExample"))
.build();
}
}
}
return getCreateExampleMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest, com.google.protobuf.Empty>
getDeleteExampleMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "DeleteExample",
requestType = com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest.class,
responseType = com.google.protobuf.Empty.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest, com.google.protobuf.Empty>
getDeleteExampleMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest, com.google.protobuf.Empty>
getDeleteExampleMethod;
if ((getDeleteExampleMethod = ExamplesGrpc.getDeleteExampleMethod) == null) {
synchronized (ExamplesGrpc.class) {
if ((getDeleteExampleMethod = ExamplesGrpc.getDeleteExampleMethod) == null) {
ExamplesGrpc.getDeleteExampleMethod =
getDeleteExampleMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest,
com.google.protobuf.Empty>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteExample"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.protobuf.Empty.getDefaultInstance()))
.setSchemaDescriptor(new ExamplesMethodDescriptorSupplier("DeleteExample"))
.build();
}
}
}
return getDeleteExampleMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest,
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>
getListExamplesMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "ListExamples",
requestType = com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest.class,
responseType = com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest,
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>
getListExamplesMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest,
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>
getListExamplesMethod;
if ((getListExamplesMethod = ExamplesGrpc.getListExamplesMethod) == null) {
synchronized (ExamplesGrpc.class) {
if ((getListExamplesMethod = ExamplesGrpc.getListExamplesMethod) == null) {
ExamplesGrpc.getListExamplesMethod =
getListExamplesMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest,
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListExamples"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse
.getDefaultInstance()))
.setSchemaDescriptor(new ExamplesMethodDescriptorSupplier("ListExamples"))
.build();
}
}
}
return getListExamplesMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getGetExampleMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "GetExample",
requestType = com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest.class,
responseType = com.google.cloud.dialogflow.cx.v3beta1.Example.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getGetExampleMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getGetExampleMethod;
if ((getGetExampleMethod = ExamplesGrpc.getGetExampleMethod) == null) {
synchronized (ExamplesGrpc.class) {
if ((getGetExampleMethod = ExamplesGrpc.getGetExampleMethod) == null) {
ExamplesGrpc.getGetExampleMethod =
getGetExampleMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetExample"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()))
.setSchemaDescriptor(new ExamplesMethodDescriptorSupplier("GetExample"))
.build();
}
}
}
return getGetExampleMethod;
}
private static volatile io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getUpdateExampleMethod;
@io.grpc.stub.annotations.RpcMethod(
fullMethodName = SERVICE_NAME + '/' + "UpdateExample",
requestType = com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest.class,
responseType = com.google.cloud.dialogflow.cx.v3beta1.Example.class,
methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getUpdateExampleMethod() {
io.grpc.MethodDescriptor<
com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
getUpdateExampleMethod;
if ((getUpdateExampleMethod = ExamplesGrpc.getUpdateExampleMethod) == null) {
synchronized (ExamplesGrpc.class) {
if ((getUpdateExampleMethod = ExamplesGrpc.getUpdateExampleMethod) == null) {
ExamplesGrpc.getUpdateExampleMethod =
getUpdateExampleMethod =
io.grpc.MethodDescriptor
.<com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest,
com.google.cloud.dialogflow.cx.v3beta1.Example>
newBuilder()
.setType(io.grpc.MethodDescriptor.MethodType.UNARY)
.setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateExample"))
.setSampledToLocalTracing(true)
.setRequestMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest
.getDefaultInstance()))
.setResponseMarshaller(
io.grpc.protobuf.ProtoUtils.marshaller(
com.google.cloud.dialogflow.cx.v3beta1.Example.getDefaultInstance()))
.setSchemaDescriptor(new ExamplesMethodDescriptorSupplier("UpdateExample"))
.build();
}
}
}
return getUpdateExampleMethod;
}
  // NOTE(review): gRPC-generated stub factories — one per calling style (async, blocking-v2,
  // blocking, future). Do not edit by hand.
  /** Creates a new async stub that supports all call types for the service */
  public static ExamplesStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ExamplesStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ExamplesStub>() {
          @java.lang.Override
          public ExamplesStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ExamplesStub(channel, callOptions);
          }
        };
    return ExamplesStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static ExamplesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ExamplesBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ExamplesBlockingV2Stub>() {
          @java.lang.Override
          public ExamplesBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ExamplesBlockingV2Stub(channel, callOptions);
          }
        };
    return ExamplesBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static ExamplesBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ExamplesBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ExamplesBlockingStub>() {
          @java.lang.Override
          public ExamplesBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ExamplesBlockingStub(channel, callOptions);
          }
        };
    return ExamplesBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static ExamplesFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ExamplesFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ExamplesFutureStub>() {
          @java.lang.Override
          public ExamplesFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ExamplesFutureStub(channel, callOptions);
          }
        };
    return ExamplesFutureStub.newStub(factory, channel);
  }
/**
*
*
* <pre>
* Service for managing [Examples][google.cloud.dialogflow.cx.v3beta1.Example].
* </pre>
*/
  public interface AsyncService {
    // Server-side contract for the Examples service. Every default method answers
    // UNIMPLEMENTED, so an implementation only overrides the RPCs it supports.
    /**
     *
     *
     * <pre>
     * Creates an example in the specified playbook.
     * </pre>
     */
    default void createExample(
        com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateExampleMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified example.
     * </pre>
     */
    default void deleteExample(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteExampleMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Returns a list of examples in the specified playbook.
     * </pre>
     */
    default void listExamples(
        com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListExamplesMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified example.
     * </pre>
     */
    default void getExample(
        com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetExampleMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Update the specified example.
     * </pre>
     */
    default void updateExample(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateExampleMethod(), responseObserver);
    }
  }
/**
* Base class for the server implementation of the service Examples.
*
* <pre>
* Service for managing [Examples][google.cloud.dialogflow.cx.v3beta1.Example].
* </pre>
*/
  public abstract static class ExamplesImplBase implements io.grpc.BindableService, AsyncService {
    // Adapter that lets an AsyncService implementation be registered directly
    // with a gRPC server; all RPC behavior comes from the AsyncService defaults.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return ExamplesGrpc.bindService(this);
    }
  }
/**
* A stub to allow clients to do asynchronous rpc calls to service Examples.
*
* <pre>
* Service for managing [Examples][google.cloud.dialogflow.cx.v3beta1.Example].
* </pre>
*/
  public static final class ExamplesStub extends io.grpc.stub.AbstractAsyncStub<ExamplesStub> {
    // Asynchronous client stub: each call starts the RPC immediately and reports
    // the result (or error) through the caller-supplied StreamObserver.
    private ExamplesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected ExamplesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ExamplesStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates an example in the specified playbook.
     * </pre>
     */
    public void createExample(
        com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateExampleMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified example.
     * </pre>
     */
    public void deleteExample(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteExampleMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Returns a list of examples in the specified playbook.
     * </pre>
     */
    public void listExamples(
        com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListExamplesMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified example.
     * </pre>
     */
    public void getExample(
        com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetExampleMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Update the specified example.
     * </pre>
     */
    public void updateExample(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateExampleMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
/**
* A stub to allow clients to do synchronous rpc calls to service Examples.
*
* <pre>
* Service for managing [Examples][google.cloud.dialogflow.cx.v3beta1.Example].
* </pre>
*/
  public static final class ExamplesBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<ExamplesBlockingV2Stub> {
    // Blocking client stub: each method waits for the RPC to complete and
    // returns the response directly (or throws a StatusRuntimeException).
    private ExamplesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected ExamplesBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ExamplesBlockingV2Stub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates an example in the specified playbook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Example createExample(
        com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateExampleMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified example.
     * </pre>
     */
    public com.google.protobuf.Empty deleteExample(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteExampleMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Returns a list of examples in the specified playbook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse listExamples(
        com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListExamplesMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified example.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Example getExample(
        com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetExampleMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Update the specified example.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Example updateExample(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateExampleMethod(), getCallOptions(), request);
    }
  }
/**
* A stub to allow clients to do limited synchronous rpc calls to service Examples.
*
* <pre>
* Service for managing [Examples][google.cloud.dialogflow.cx.v3beta1.Example].
* </pre>
*/
  public static final class ExamplesBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<ExamplesBlockingStub> {
    // Legacy blocking stub (unary and server-streaming calls only); kept for
    // backward compatibility alongside the newer BlockingV2 stub above.
    private ExamplesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected ExamplesBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ExamplesBlockingStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates an example in the specified playbook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Example createExample(
        com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateExampleMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified example.
     * </pre>
     */
    public com.google.protobuf.Empty deleteExample(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteExampleMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Returns a list of examples in the specified playbook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse listExamples(
        com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListExamplesMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified example.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Example getExample(
        com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetExampleMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Update the specified example.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Example updateExample(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateExampleMethod(), getCallOptions(), request);
    }
  }
/**
* A stub to allow clients to do ListenableFuture-style rpc calls to service Examples.
*
* <pre>
* Service for managing [Examples][google.cloud.dialogflow.cx.v3beta1.Example].
* </pre>
*/
  public static final class ExamplesFutureStub
      extends io.grpc.stub.AbstractFutureStub<ExamplesFutureStub> {
    // Future-style client stub: each method returns a ListenableFuture that
    // completes with the response or fails with the RPC's status.
    private ExamplesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected ExamplesFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ExamplesFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates an example in the specified playbook.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.Example>
        createExample(com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateExampleMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified example.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        deleteExample(com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteExampleMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Returns a list of examples in the specified playbook.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>
        listExamples(com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListExamplesMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified example.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.Example>
        getExample(com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetExampleMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Update the specified example.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.Example>
        updateExample(com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateExampleMethod(), getCallOptions()), request);
    }
  }
  // Dispatch ordinals shared by MethodHandlers.invoke and bindService; each value
  // must stay in sync with the handler registered for the corresponding RPC.
  private static final int METHODID_CREATE_EXAMPLE = 0;
  private static final int METHODID_DELETE_EXAMPLE = 1;
  private static final int METHODID_LIST_EXAMPLES = 2;
  private static final int METHODID_GET_EXAMPLE = 3;
  private static final int METHODID_UPDATE_EXAMPLE = 4;
  // Bridges gRPC's generic server-call machinery to the typed AsyncService
  // methods: one instance per RPC, dispatching on the METHODID_* ordinal.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Casts are safe: bindService pairs each METHODID_* with the matching
      // request/response types.
      switch (methodId) {
        case METHODID_CREATE_EXAMPLE:
          serviceImpl.createExample(
              (com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>)
                  responseObserver);
          break;
        case METHODID_DELETE_EXAMPLE:
          serviceImpl.deleteExample(
              (com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        case METHODID_LIST_EXAMPLES:
          serviceImpl.listExamples(
              (com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>)
                  responseObserver);
          break;
        case METHODID_GET_EXAMPLE:
          serviceImpl.getExample(
              (com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>)
                  responseObserver);
          break;
        case METHODID_UPDATE_EXAMPLE:
          serviceImpl.updateExample(
              (com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Example>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // This service has no client/bidi streaming RPCs, so reaching here is a bug.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Assembles the server-side service definition, pairing every method
  // descriptor with a MethodHandlers instance keyed by its METHODID_* ordinal.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getCreateExampleMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.CreateExampleRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.Example>(
                    service, METHODID_CREATE_EXAMPLE)))
        .addMethod(
            getDeleteExampleMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.DeleteExampleRequest,
                    com.google.protobuf.Empty>(service, METHODID_DELETE_EXAMPLE)))
        .addMethod(
            getListExamplesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.ListExamplesRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.ListExamplesResponse>(
                    service, METHODID_LIST_EXAMPLES)))
        .addMethod(
            getGetExampleMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.GetExampleRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.Example>(service, METHODID_GET_EXAMPLE)))
        .addMethod(
            getUpdateExampleMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.UpdateExampleRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.Example>(
                    service, METHODID_UPDATE_EXAMPLE)))
        .build();
  }
  // Exposes the proto file/service descriptors (e.g. for server reflection).
  private abstract static class ExamplesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    ExamplesBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.dialogflow.cx.v3beta1.ExampleProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("Examples");
    }
  }
  // File-level schema supplier attached to the service descriptor below.
  private static final class ExamplesFileDescriptorSupplier extends ExamplesBaseDescriptorSupplier {
    ExamplesFileDescriptorSupplier() {}
  }
  // Per-method schema supplier; resolves the named RPC from the proto descriptor.
  private static final class ExamplesMethodDescriptorSupplier extends ExamplesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    ExamplesMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-built descriptor for the whole Examples service. The volatile field
  // plus double-checked locking keeps initialization thread-safe while making
  // subsequent reads lock-free.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (ExamplesGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new ExamplesFileDescriptorSupplier())
                      .addMethod(getCreateExampleMethod())
                      .addMethod(getDeleteExampleMethod())
                      .addMethod(getListExamplesMethod())
                      .addMethod(getGetExampleMethod())
                      .addMethod(getUpdateExampleMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for managing [Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/dialogflow/cx/v3beta1/webhook.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class WebhooksGrpc {
  // Not instantiable: this class only hosts static descriptors and stub factories.
  private WebhooksGrpc() {}
  public static final java.lang.String SERVICE_NAME = "google.cloud.dialogflow.cx.v3beta1.Webhooks";
  // Static method descriptors that strictly reflect the proto.
  // Lazily-initialized descriptor (double-checked locking on WebhooksGrpc.class).
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest,
          com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>
      getListWebhooksMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListWebhooks",
      requestType = com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest,
          com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>
      getListWebhooksMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest,
            com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>
        getListWebhooksMethod;
    if ((getListWebhooksMethod = WebhooksGrpc.getListWebhooksMethod) == null) {
      synchronized (WebhooksGrpc.class) {
        if ((getListWebhooksMethod = WebhooksGrpc.getListWebhooksMethod) == null) {
          WebhooksGrpc.getListWebhooksMethod =
              getListWebhooksMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest,
                          com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListWebhooks"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(new WebhooksMethodDescriptorSupplier("ListWebhooks"))
                      .build();
        }
      }
    }
    return getListWebhooksMethod;
  }
  // Lazily-initialized descriptor (double-checked locking on WebhooksGrpc.class).
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest,
          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
      getGetWebhookMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetWebhook",
      requestType = com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3beta1.Webhook.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest,
          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
      getGetWebhookMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest,
            com.google.cloud.dialogflow.cx.v3beta1.Webhook>
        getGetWebhookMethod;
    if ((getGetWebhookMethod = WebhooksGrpc.getGetWebhookMethod) == null) {
      synchronized (WebhooksGrpc.class) {
        if ((getGetWebhookMethod = WebhooksGrpc.getGetWebhookMethod) == null) {
          WebhooksGrpc.getGetWebhookMethod =
              getGetWebhookMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest,
                          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetWebhook"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.Webhook.getDefaultInstance()))
                      .setSchemaDescriptor(new WebhooksMethodDescriptorSupplier("GetWebhook"))
                      .build();
        }
      }
    }
    return getGetWebhookMethod;
  }
  // Lazily-initialized descriptor (double-checked locking on WebhooksGrpc.class).
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest,
          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
      getCreateWebhookMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateWebhook",
      requestType = com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3beta1.Webhook.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest,
          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
      getCreateWebhookMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest,
            com.google.cloud.dialogflow.cx.v3beta1.Webhook>
        getCreateWebhookMethod;
    if ((getCreateWebhookMethod = WebhooksGrpc.getCreateWebhookMethod) == null) {
      synchronized (WebhooksGrpc.class) {
        if ((getCreateWebhookMethod = WebhooksGrpc.getCreateWebhookMethod) == null) {
          WebhooksGrpc.getCreateWebhookMethod =
              getCreateWebhookMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest,
                          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateWebhook"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.Webhook.getDefaultInstance()))
                      .setSchemaDescriptor(new WebhooksMethodDescriptorSupplier("CreateWebhook"))
                      .build();
        }
      }
    }
    return getCreateWebhookMethod;
  }
  // Lazily-initialized descriptor (double-checked locking on WebhooksGrpc.class).
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest,
          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
      getUpdateWebhookMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdateWebhook",
      requestType = com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3beta1.Webhook.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest,
          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
      getUpdateWebhookMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest,
            com.google.cloud.dialogflow.cx.v3beta1.Webhook>
        getUpdateWebhookMethod;
    if ((getUpdateWebhookMethod = WebhooksGrpc.getUpdateWebhookMethod) == null) {
      synchronized (WebhooksGrpc.class) {
        if ((getUpdateWebhookMethod = WebhooksGrpc.getUpdateWebhookMethod) == null) {
          WebhooksGrpc.getUpdateWebhookMethod =
              getUpdateWebhookMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest,
                          com.google.cloud.dialogflow.cx.v3beta1.Webhook>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateWebhook"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.Webhook.getDefaultInstance()))
                      .setSchemaDescriptor(new WebhooksMethodDescriptorSupplier("UpdateWebhook"))
                      .build();
        }
      }
    }
    return getUpdateWebhookMethod;
  }
  // Lazily-initialized descriptor (double-checked locking on WebhooksGrpc.class).
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest, com.google.protobuf.Empty>
      getDeleteWebhookMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteWebhook",
      requestType = com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest.class,
      responseType = com.google.protobuf.Empty.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest, com.google.protobuf.Empty>
      getDeleteWebhookMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest, com.google.protobuf.Empty>
        getDeleteWebhookMethod;
    if ((getDeleteWebhookMethod = WebhooksGrpc.getDeleteWebhookMethod) == null) {
      synchronized (WebhooksGrpc.class) {
        if ((getDeleteWebhookMethod = WebhooksGrpc.getDeleteWebhookMethod) == null) {
          WebhooksGrpc.getDeleteWebhookMethod =
              getDeleteWebhookMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest,
                          com.google.protobuf.Empty>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteWebhook"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.protobuf.Empty.getDefaultInstance()))
                      .setSchemaDescriptor(new WebhooksMethodDescriptorSupplier("DeleteWebhook"))
                      .build();
        }
      }
    }
    return getDeleteWebhookMethod;
  }
/** Creates a new async stub that supports all call types for the service */
public static WebhooksStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<WebhooksStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<WebhooksStub>() {
@java.lang.Override
public WebhooksStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new WebhooksStub(channel, callOptions);
}
};
return WebhooksStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static WebhooksBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<WebhooksBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<WebhooksBlockingV2Stub>() {
@java.lang.Override
public WebhooksBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new WebhooksBlockingV2Stub(channel, callOptions);
}
};
return WebhooksBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static WebhooksBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<WebhooksBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<WebhooksBlockingStub>() {
@java.lang.Override
public WebhooksBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new WebhooksBlockingStub(channel, callOptions);
}
};
return WebhooksBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static WebhooksFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<WebhooksFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<WebhooksFutureStub>() {
@java.lang.Override
public WebhooksFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new WebhooksFutureStub(channel, callOptions);
}
};
return WebhooksFutureStub.newStub(factory, channel);
}
/**
*
*
* <pre>
* Service for managing [Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
* </pre>
*/
  public interface AsyncService {
    // Server-side contract for the Webhooks service. Every default method answers
    // UNIMPLEMENTED, so an implementation only overrides the RPCs it supports.
    /**
     *
     *
     * <pre>
     * Returns the list of all webhooks in the specified agent.
     * </pre>
     */
    default void listWebhooks(
        com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListWebhooksMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified webhook.
     * </pre>
     */
    default void getWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetWebhookMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Creates a webhook in the specified agent.
     * </pre>
     */
    default void createWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateWebhookMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified webhook.
     * </pre>
     */
    default void updateWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateWebhookMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified webhook.
     * </pre>
     */
    default void deleteWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteWebhookMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service Webhooks.
   *
   * <pre>
   * Service for managing [Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
   * </pre>
   */
  public abstract static class WebhooksImplBase implements io.grpc.BindableService, AsyncService {
    // Binds this implementation's (possibly overridden) AsyncService methods
    // into a ServerServiceDefinition the gRPC runtime can dispatch to.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return WebhooksGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service Webhooks.
   *
   * <pre>
   * Service for managing [Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
   * </pre>
   */
  public static final class WebhooksStub extends io.grpc.stub.AbstractAsyncStub<WebhooksStub> {
    // Each method below delegates to ClientCalls.asyncUnaryCall: the response
    // (or error) is delivered to the supplied StreamObserver.
    private WebhooksStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected WebhooksStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new WebhooksStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all webhooks in the specified agent.
     * </pre>
     */
    public void listWebhooks(
        com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListWebhooksMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified webhook.
     * </pre>
     */
    public void getWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetWebhookMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Creates a webhook in the specified agent.
     * </pre>
     */
    public void createWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateWebhookMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified webhook.
     * </pre>
     */
    public void updateWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateWebhookMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified webhook.
     * </pre>
     */
    public void deleteWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteWebhookMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service Webhooks.
   *
   * <pre>
   * Service for managing [Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
   * </pre>
   */
  public static final class WebhooksBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<WebhooksBlockingV2Stub> {
    // Each method blocks the calling thread via ClientCalls.blockingUnaryCall
    // until the server responds, then returns the response message directly.
    private WebhooksBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected WebhooksBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new WebhooksBlockingV2Stub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all webhooks in the specified agent.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse listWebhooks(
        com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListWebhooksMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified webhook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Webhook getWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetWebhookMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a webhook in the specified agent.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Webhook createWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateWebhookMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified webhook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Webhook updateWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateWebhookMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified webhook.
     * </pre>
     */
    public com.google.protobuf.Empty deleteWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteWebhookMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service Webhooks.
   *
   * <pre>
   * Service for managing [Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
   * </pre>
   */
  public static final class WebhooksBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<WebhooksBlockingStub> {
    // Legacy blocking stub; method bodies are identical delegations to
    // ClientCalls.blockingUnaryCall (see WebhooksBlockingV2Stub).
    private WebhooksBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected WebhooksBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new WebhooksBlockingStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all webhooks in the specified agent.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse listWebhooks(
        com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListWebhooksMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified webhook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Webhook getWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetWebhookMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a webhook in the specified agent.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Webhook createWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateWebhookMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified webhook.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3beta1.Webhook updateWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateWebhookMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified webhook.
     * </pre>
     */
    public com.google.protobuf.Empty deleteWebhook(
        com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteWebhookMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service Webhooks.
   *
   * <pre>
   * Service for managing [Webhooks][google.cloud.dialogflow.cx.v3beta1.Webhook].
   * </pre>
   */
  public static final class WebhooksFutureStub
      extends io.grpc.stub.AbstractFutureStub<WebhooksFutureStub> {
    // Each method returns immediately with a ListenableFuture that completes
    // when the unary call does (ClientCalls.futureUnaryCall).
    private WebhooksFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected WebhooksFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new WebhooksFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all webhooks in the specified agent.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>
        listWebhooks(com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListWebhooksMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified webhook.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.Webhook>
        getWebhook(com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetWebhookMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a webhook in the specified agent.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.Webhook>
        createWebhook(com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateWebhookMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified webhook.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3beta1.Webhook>
        updateWebhook(com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateWebhookMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified webhook.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        deleteWebhook(com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteWebhookMethod(), getCallOptions()), request);
    }
  }
  // Numeric dispatch ids used by MethodHandlers.invoke to route a request to
  // the matching AsyncService method.
  private static final int METHODID_LIST_WEBHOOKS = 0;
  private static final int METHODID_GET_WEBHOOK = 1;
  private static final int METHODID_CREATE_WEBHOOK = 2;
  private static final int METHODID_UPDATE_WEBHOOK = 3;
  private static final int METHODID_DELETE_WEBHOOK = 4;
  // Adapter that bridges the gRPC runtime's untyped call interface to the
  // typed AsyncService methods. One instance per method, keyed by methodId.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    // Unary dispatch: casts are safe because bindService pairs each methodId
    // with the matching request/response types.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_LIST_WEBHOOKS:
          serviceImpl.listWebhooks(
              (com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>)
                  responseObserver);
          break;
        case METHODID_GET_WEBHOOK:
          serviceImpl.getWebhook(
              (com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>)
                  responseObserver);
          break;
        case METHODID_CREATE_WEBHOOK:
          serviceImpl.createWebhook(
              (com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>)
                  responseObserver);
          break;
        case METHODID_UPDATE_WEBHOOK:
          serviceImpl.updateWebhook(
              (com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3beta1.Webhook>)
                  responseObserver);
          break;
        case METHODID_DELETE_WEBHOOK:
          serviceImpl.deleteWebhook(
              (com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    // Streaming dispatch: this service has no streaming methods, so any
    // methodId reaching here is a generator/runtime bug.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Builds the runtime service definition, wiring each RPC method descriptor
  // to a MethodHandlers instance with the corresponding METHODID.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getListWebhooksMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.ListWebhooksResponse>(
                    service, METHODID_LIST_WEBHOOKS)))
        .addMethod(
            getGetWebhookMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.GetWebhookRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.Webhook>(service, METHODID_GET_WEBHOOK)))
        .addMethod(
            getCreateWebhookMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.CreateWebhookRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.Webhook>(
                    service, METHODID_CREATE_WEBHOOK)))
        .addMethod(
            getUpdateWebhookMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.UpdateWebhookRequest,
                    com.google.cloud.dialogflow.cx.v3beta1.Webhook>(
                    service, METHODID_UPDATE_WEBHOOK)))
        .addMethod(
            getDeleteWebhookMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3beta1.DeleteWebhookRequest,
                    com.google.protobuf.Empty>(service, METHODID_DELETE_WEBHOOK)))
        .build();
  }
  // Shared base for descriptor suppliers: resolves the proto file descriptor
  // and looks up the "Webhooks" service inside it (for reflection/debug).
  private abstract static class WebhooksBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    WebhooksBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.dialogflow.cx.v3beta1.WebhookProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("Webhooks");
    }
  }
  // File-level supplier: inherits everything from the base; exists only to
  // give the service descriptor a concrete schema-descriptor type.
  private static final class WebhooksFileDescriptorSupplier extends WebhooksBaseDescriptorSupplier {
    WebhooksFileDescriptorSupplier() {}
  }
  // Per-method supplier: additionally resolves a single named method's proto
  // descriptor from the service descriptor.
  private static final class WebhooksMethodDescriptorSupplier extends WebhooksBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    WebhooksMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-built service descriptor, published via double-checked locking:
  // the field is volatile and only written inside the synchronized block.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (WebhooksGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new WebhooksFileDescriptorSupplier())
                      .addMethod(getListWebhooksMethod())
                      .addMethod(getGetWebhookMethod())
                      .addMethod(getCreateWebhookMethod())
                      .addMethod(getUpdateWebhookMethod())
                      .addMethod(getDeleteWebhookMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
apache/grails-core | 36,321 | grails-web-common/src/main/groovy/org/grails/web/json/parser/JsonParserTokenManager.java | /* Generated By:JavaCC: Do not edit this line. JsonParserTokenManager.java */
package org.grails.web.json.parser;
/** Token Manager. */
@SuppressWarnings("unused")
public class JsonParserTokenManager implements JsonParserConstants
{
/** Debug output. Defaults to stdout; only used when the lexer is generated with DEBUG options. */
public java.io.PrintStream debugStream = System.out;
/** Set debug output. */
public void setDebugStream(java.io.PrintStream ds) { debugStream = ds; }
// Lexical state 3 (HEX_STATE) has no fixed string literals, so literal
// matching falls straight through to the NFA.
private int jjMoveStringLiteralDfa0_3()
{
   return jjMoveNfa_3(0, 0);
}
// NFA simulation for lexical state 3 (HEX_STATE): matches exactly four
// hexadecimal digits (token kind 19, the payload of a \uXXXX escape).
// jjstateSet is split into a "current" and a "next" half that are swapped
// each iteration; a state transition appends to the "next" half.
private int jjMoveNfa_3(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 4;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         // ASCII 0..63: mask 0x3ff000000000000L selects the digits '0'-'9'.
         long l = 1L << curChar;
         do
         {
            switch (jjstateSet[--i])
            {
               case 0:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               case 1:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 2;
                  break;
               case 2:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               case 3:
                  if ((0x3ff000000000000L & l) != 0L && kind > 19)
                     kind = 19;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else if (curChar < 128)
      {
         // ASCII 64..127: mask 0x7e0000007eL selects 'A'-'F' and 'a'-'f'.
         long l = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               case 0:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 1;
                  break;
               case 1:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 2;
                  break;
               case 2:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               case 3:
                  if ((0x7e0000007eL & l) != 0L && kind > 19)
                     kind = 19;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else
      {
         // Non-ASCII characters are never hex digits: no transitions fire.
         int hiByte = (curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               default: break;
            }
         } while (i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         // Remember the longest match seen so far.
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap current/next state halves; stop when no live states remain.
      if ((i = jjnewStateCnt) == (startsAt = 4 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch (java.io.IOException e) { return curPos; }
   }
}
// When literal matching in state 0 fails at position pos, returns the NFA
// state to resume from (-1 if none). Only a leading '"' (literal kind 12,
// bit 0x1000) can continue as NFA state 31.
private final int jjStopStringLiteralDfa_0(int pos, long active0)
{
   switch (pos)
   {
      case 0:
         if ((active0 & 0x1000L) != 0L)
            return 31;
         return -1;
      default:
         return -1;
   }
}
// Hands control from the literal DFA to the NFA for state 0.
private final int jjStartNfa_0(int pos, long active0)
{
   return jjMoveNfa_0(jjStopStringLiteralDfa_0(pos, active0), pos + 1);
}
// Records a completed literal match of the given kind ending at pos and
// returns the number of characters consumed.
private int jjStopAtPos(int pos, int kind)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   return pos + 1;
}
// Literal DFA, state 0 (DEFAULT), first character. Single-character JSON
// punctuation tokens complete immediately; 'f'/'n'/'t' begin the keywords
// false/null/true; anything else goes to the NFA.
private int jjMoveStringLiteralDfa0_0()
{
   switch (curChar)
   {
      case 34:
         // '"' — also a valid NFA start (state 31), so keep the NFA running.
         return jjStartNfaWithStates_0(0, 12, 31);
      case 44:
         return jjStopAtPos(0, 23);
      case 45:
         return jjStopAtPos(0, 29);
      case 46:
         return jjStopAtPos(0, 30);
      case 58:
         return jjStopAtPos(0, 24);
      case 91:
         return jjStopAtPos(0, 25);
      case 93:
         return jjStopAtPos(0, 26);
      case 102:
         return jjMoveStringLiteralDfa1_0(0x10000000L);
      case 110:
         return jjMoveStringLiteralDfa1_0(0x400000L);
      case 116:
         return jjMoveStringLiteralDfa1_0(0x8000000L);
      case 123:
         return jjStopAtPos(0, 20);
      case 125:
         return jjStopAtPos(0, 21);
      default:
         return jjMoveNfa_0(0, 0);
   }
}
// Literal DFA, second character of false/true/null. active0 is the bitmask
// of literal kinds still viable after the first character.
private int jjMoveStringLiteralDfa1_0(long active0)
{
   try { curChar = input_stream.readChar(); }
   catch (java.io.IOException e) {
      jjStopStringLiteralDfa_0(0, active0);
      return 1;
   }
   switch (curChar)
   {
      case 97:
         return jjMoveStringLiteralDfa2_0(active0, 0x10000000L);
      case 114:
         return jjMoveStringLiteralDfa2_0(active0, 0x8000000L);
      case 117:
         return jjMoveStringLiteralDfa2_0(active0, 0x400000L);
      default:
         break;
   }
   return jjStartNfa_0(0, active0);
}
// Literal DFA, third character. The still-viable kinds are the intersection
// of the previous step's mask (old0) and this step's candidates (active0).
private int jjMoveStringLiteralDfa2_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(0, old0);
   try { curChar = input_stream.readChar(); }
   catch (java.io.IOException e) {
      jjStopStringLiteralDfa_0(1, active0);
      return 2;
   }
   switch (curChar)
   {
      case 108:
         return jjMoveStringLiteralDfa3_0(active0, 0x10400000L);
      case 117:
         return jjMoveStringLiteralDfa3_0(active0, 0x8000000L);
      default:
         break;
   }
   return jjStartNfa_0(1, active0);
}
// Literal DFA, fourth character: completes "true" (kind 27) and "null"
// (kind 22); "false" needs one more character.
private int jjMoveStringLiteralDfa3_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(1, old0);
   try { curChar = input_stream.readChar(); }
   catch (java.io.IOException e) {
      jjStopStringLiteralDfa_0(2, active0);
      return 3;
   }
   switch (curChar)
   {
      case 101:
         if ((active0 & 0x8000000L) != 0L)
            return jjStopAtPos(3, 27);
         break;
      case 108:
         if ((active0 & 0x400000L) != 0L)
            return jjStopAtPos(3, 22);
         break;
      case 115:
         return jjMoveStringLiteralDfa4_0(active0, 0x10000000L);
      default:
         break;
   }
   return jjStartNfa_0(2, active0);
}
// Literal DFA, fifth character: completes "false" (kind 28).
private int jjMoveStringLiteralDfa4_0(long old0, long active0)
{
   if (((active0 &= old0)) == 0L)
      return jjStartNfa_0(2, old0);
   try { curChar = input_stream.readChar(); }
   catch (java.io.IOException e) {
      jjStopStringLiteralDfa_0(3, active0);
      return 4;
   }
   switch (curChar)
   {
      case 101:
         if ((active0 & 0x10000000L) != 0L)
            return jjStopAtPos(4, 28);
         break;
      default:
         break;
   }
   return jjStartNfa_0(3, active0);
}
// Records a literal match of `kind` at `pos`, then continues the NFA from
// `state` in case a longer match exists (e.g. '"' starting a string).
private int jjStartNfaWithStates_0(int pos, int kind, int state)
{
   jjmatchedKind = kind;
   jjmatchedPos = pos;
   try { curChar = input_stream.readChar(); }
   catch (java.io.IOException e) { return pos + 1; }
   return jjMoveNfa_0(state, pos + 1);
}
// Character-class bit vectors for non-ASCII input (256 bits each, one bit
// per value). jjbitVec0: all characters except NUL; jjbitVec2: 128..255.
static final long[] jjbitVec0 = {
   0xfffffffffffffffeL, 0xffffffffffffffffL, 0xffffffffffffffffL, 0xffffffffffffffffL
};
static final long[] jjbitVec2 = {
   0x0L, 0x0L, 0xffffffffffffffffL, 0xffffffffffffffffL
};
// NFA simulation for lexical state 0 (DEFAULT). Covers the tokens that are
// not fixed literals: digit runs (kind 6), exponent sign continuation
// (kind 5), \uXXXX escapes (kind 8), and single-/double-quoted strings with
// backslash escapes (closing quotes are kinds 10 and 11). States 9-19
// track single-quoted strings, 20-30 double-quoted ones; states 3-8 handle
// a bare \uXXXX. The bit masks select character classes, e.g.
// 0x3ff000000000000L = '0'-'9' and 0x7e0000007eL = 'A'-'F'/'a'-'f'
// (relative to each 64-character window).
private int jjMoveNfa_0(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 31;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         // ASCII 0..63: digits, quotes, punctuation.
         long l = 1L << curChar;
         do
         {
            switch (jjstateSet[--i])
            {
               case 31:
                  if ((0xfffffffbffffdbffL & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  else if (curChar == 34)
                  {
                     if (kind > 11)
                        kind = 11;
                  }
                  break;
               case 0:
                  if ((0x3ff000000000000L & l) != 0L)
                  {
                     if (kind > 6)
                        kind = 6;
                     jjCheckNAdd(2);
                  }
                  else if (curChar == 34)
                     jjCheckNAddStates(0, 3);
                  else if (curChar == 39)
                     jjCheckNAddStates(4, 7);
                  break;
               case 1:
                  if ((0x280000000000L & l) != 0L && kind > 5)
                     kind = 5;
                  break;
               case 2:
                  if ((0x3ff000000000000L & l) == 0L)
                     break;
                  if (kind > 6)
                     kind = 6;
                  jjCheckNAdd(2);
                  break;
               case 4:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 5;
                  break;
               case 5:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 6;
                  break;
               case 6:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 7;
                  break;
               case 7:
                  if ((0x3ff000000000000L & l) != 0L && kind > 8)
                     kind = 8;
                  break;
               case 9:
                  if (curChar == 39)
                     jjCheckNAddStates(4, 7);
                  break;
               case 10:
                  if ((0xffffff7fffffdbffL & l) != 0L)
                     jjCheckNAddStates(4, 7);
                  break;
               case 12:
                  if ((0x808400000000L & l) != 0L)
                     jjCheckNAddStates(4, 7);
                  break;
               case 14:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 15;
                  break;
               case 15:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 16;
                  break;
               case 16:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 17;
                  break;
               case 17:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddStates(4, 7);
                  break;
               case 19:
                  if (curChar == 39 && kind > 10)
                     kind = 10;
                  break;
               case 20:
                  if (curChar == 34)
                     jjCheckNAddStates(0, 3);
                  break;
               case 21:
                  if ((0xfffffffbffffdbffL & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  break;
               case 23:
                  if ((0x808400000000L & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  break;
               case 25:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 26;
                  break;
               case 26:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 27;
                  break;
               case 27:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 28;
                  break;
               case 28:
                  if ((0x3ff000000000000L & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  break;
               case 30:
                  if (curChar == 34 && kind > 11)
                     kind = 11;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else if (curChar < 128)
      {
         // ASCII 64..127: letters, backslash escapes, 'u' of \uXXXX.
         long l = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               case 31:
                  if ((0xffffffffefffffffL & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  else if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 24;
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 23;
                  break;
               case 0:
                  if ((0x2000000020L & l) != 0L)
                  {
                     if (kind > 5)
                        kind = 5;
                     jjstateSet[jjnewStateCnt++] = 1;
                  }
                  else if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               case 3:
                  if (curChar == 117)
                     jjstateSet[jjnewStateCnt++] = 4;
                  break;
               case 4:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 5;
                  break;
               case 5:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 6;
                  break;
               case 6:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 7;
                  break;
               case 7:
                  if ((0x7e0000007eL & l) != 0L && kind > 8)
                     kind = 8;
                  break;
               case 8:
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 3;
                  break;
               case 10:
                  if ((0xffffffffefffffffL & l) != 0L)
                     jjCheckNAddStates(4, 7);
                  break;
               case 11:
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 12;
                  break;
               case 12:
                  if ((0x14404410000000L & l) != 0L)
                     jjCheckNAddStates(4, 7);
                  break;
               case 13:
                  if (curChar == 117)
                     jjstateSet[jjnewStateCnt++] = 14;
                  break;
               case 14:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 15;
                  break;
               case 15:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 16;
                  break;
               case 16:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 17;
                  break;
               case 17:
                  if ((0x7e0000007eL & l) != 0L)
                     jjCheckNAddStates(4, 7);
                  break;
               case 18:
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 13;
                  break;
               case 21:
                  if ((0xffffffffefffffffL & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  break;
               case 22:
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 23;
                  break;
               case 23:
                  if ((0x14404410000000L & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  break;
               case 24:
                  if (curChar == 117)
                     jjstateSet[jjnewStateCnt++] = 25;
                  break;
               case 25:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 26;
                  break;
               case 26:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 27;
                  break;
               case 27:
                  if ((0x7e0000007eL & l) != 0L)
                     jjstateSet[jjnewStateCnt++] = 28;
                  break;
               case 28:
                  if ((0x7e0000007eL & l) != 0L)
                     jjCheckNAddStates(0, 3);
                  break;
               case 29:
                  if (curChar == 92)
                     jjstateSet[jjnewStateCnt++] = 24;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else
      {
         // Non-ASCII: only string-body states accept, via the bit vectors.
         int hiByte = (curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               case 31:
               case 21:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                     jjCheckNAddStates(0, 3);
                  break;
               case 10:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2))
                     jjAddStates(4, 7);
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         // Remember the longest match seen so far.
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      // Swap current/next state halves; stop when no live states remain.
      if ((i = jjnewStateCnt) == (startsAt = 31 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch (java.io.IOException e) { return curPos; }
   }
}
// No string literal in lexical state 2 (ESC_STATE) can hand off to the NFA,
// so there is never a resume state.
private final int jjStopStringLiteralDfa_2(int pos, long active0)
{
   return -1;
}
// Hands control from the literal DFA to the NFA for state 2.
private final int jjStartNfa_2(int pos, long active0)
{
   return jjMoveNfa_2(jjStopStringLiteralDfa_2(pos, active0), pos + 1);
}
// Lexical state 2 (ESC_STATE), first character after a backslash: 'u'
// (kind 17) starts a unicode escape; other characters go to the NFA.
private int jjMoveStringLiteralDfa0_2()
{
   switch (curChar)
   {
      case 117:
         return jjStopAtPos(0, 17);
      default:
         return jjMoveNfa_2(0, 0);
   }
}
// NFA simulation for lexical state 2 (ESC_STATE): a single simple escape
// character after a backslash (kind 16). The masks select the recognised
// escape letters/symbols in each 64-character window.
private int jjMoveNfa_2(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 1;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch (jjstateSet[--i])
            {
               case 0:
                  if ((0x800400000000L & l) != 0L)
                     kind = 16;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               case 0:
                  if ((0x14404410000000L & l) != 0L)
                     kind = 16;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else
      {
         // Non-ASCII characters are never valid escapes.
         int hiByte = (curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               default: break;
            }
         } while (i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      if ((i = jjnewStateCnt) == (startsAt = 1 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch (java.io.IOException e) { return curPos; }
   }
}
// No string literal in lexical state 1 (STRING_STATE) can hand off to the
// NFA, so there is never a resume state.
private final int jjStopStringLiteralDfa_1(int pos, long active0)
{
   return -1;
}
// Hands control from the literal DFA to the NFA for state 1.
private final int jjStartNfa_1(int pos, long active0)
{
   return jjMoveNfa_1(jjStopStringLiteralDfa_1(pos, active0), pos + 1);
}
// Lexical state 1 (STRING_STATE), first character: a backslash (kind 13)
// switches to escape handling; anything else goes to the NFA.
private int jjMoveStringLiteralDfa0_1()
{
   switch (curChar)
   {
      case 92:
         return jjStopAtPos(0, 13);
      default:
         return jjMoveNfa_1(0, 0);
   }
}
// NFA simulation for lexical state 1 (STRING_STATE): a closing '"' is
// kind 14, any other single character inside the string is kind 15.
private int jjMoveNfa_1(int startState, int curPos)
{
   int startsAt = 0;
   jjnewStateCnt = 2;
   int i = 1;
   jjstateSet[0] = startState;
   int kind = 0x7fffffff;
   for (;;)
   {
      if (++jjround == 0x7fffffff)
         ReInitRounds();
      if (curChar < 64)
      {
         long l = 1L << curChar;
         do
         {
            switch (jjstateSet[--i])
            {
               case 0:
                  if ((0xfffffffbffffffffL & l) != 0L)
                  {
                     if (kind > 15)
                        kind = 15;
                  }
                  else if (curChar == 34)
                  {
                     if (kind > 14)
                        kind = 14;
                  }
                  break;
               case 1:
                  if ((0xfffffffbffffffffL & l) != 0L)
                     kind = 15;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else if (curChar < 128)
      {
         long l = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               case 0:
                  if ((0xffffffffefffffffL & l) != 0L)
                     kind = 15;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      else
      {
         // Non-ASCII string content also matches kind 15 via the bit vectors.
         int hiByte = (curChar >> 8);
         int i1 = hiByte >> 6;
         long l1 = 1L << (hiByte & 077);
         int i2 = (curChar & 0xff) >> 6;
         long l2 = 1L << (curChar & 077);
         do
         {
            switch (jjstateSet[--i])
            {
               case 0:
                  if (jjCanMove_0(hiByte, i1, i2, l1, l2) && kind > 15)
                     kind = 15;
                  break;
               default: break;
            }
         } while (i != startsAt);
      }
      if (kind != 0x7fffffff)
      {
         jjmatchedKind = kind;
         jjmatchedPos = curPos;
         kind = 0x7fffffff;
      }
      ++curPos;
      if ((i = jjnewStateCnt) == (startsAt = 2 - (jjnewStateCnt = startsAt)))
         return curPos;
      try { curChar = input_stream.readChar(); }
      catch (java.io.IOException e) { return curPos; }
   }
}
// Flattened NFA transition targets; jjAddStates/jjCheckNAddStates reference
// ranges of this array (e.g. 0-3 and 4-7 in jjMoveNfa_0).
static final int[] jjnextStates = {
   21, 22, 29, 30, 10, 11, 18, 19,
};
// Tests whether a non-ASCII character belongs to character class 0.
// hiByte 0 (Latin-1) is checked against jjbitVec2; every other page is
// checked against jjbitVec0.
private static final boolean jjCanMove_0(int hiByte, int i1, int i2, long l1, long l2)
{
   if (hiByte == 0)
      return (jjbitVec2[i2] & l2) != 0L;
   return (jjbitVec0[i1] & l1) != 0L;
}
/** Token literal values. Octal escapes: \42 = '"', \173/\175 = braces, etc. */
public static final String[] jjstrLiteralImages = {
"", null, null, null, null, null, null, null, null, null, null, null, "\42",
null, null, null, null, null, null, null, "\173", "\175", "\156\165\154\154", "\54",
"\72", "\133", "\135", "\164\162\165\145", "\146\141\154\163\145", "\55", "\56", };
/** Lexer state names. */
public static final String[] lexStateNames = {
   "DEFAULT",
   "STRING_STATE",
   "ESC_STATE",
   "HEX_STATE",
};
/** Lex State array. Indexed by token kind; -1 means "stay in current state". */
public static final int[] jjnewLexState = {
   -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 1, 2, 0, -1, 1, 3, -1, 1, -1, -1, -1, -1, -1,
   -1, -1, -1, -1, -1, -1,
};
// Bit sets over token kinds: kinds returned to the parser ...
static final long[] jjtoToken = {
   0x7ff9dd61L,
};
// ... kinds silently skipped ...
static final long[] jjtoSkip = {
   0x1eL,
};
// ... and kinds that continue accumulating into a longer token.
static final long[] jjtoMore = {
   0x22000L,
};
// Character source plus NFA bookkeeping: jjrounds marks the round in which
// each of the 31 states was last added; jjstateSet holds current + next
// state halves (hence 2 * 31 entries).
protected SimpleCharStream input_stream;
private final int[] jjrounds = new int[31];
private final int[] jjstateSet = new int[62];
protected char curChar;
/** Constructor. Rejects static char streams, which cannot back a non-static lexer. */
public JsonParserTokenManager(SimpleCharStream stream) {
   if (SimpleCharStream.staticFlag)
      throw new Error("ERROR: Cannot use a static CharStream class with a non-static lexical analyzer.");
   input_stream = stream;
}
/** Constructor. Additionally selects the initial lexical state. */
public JsonParserTokenManager(SimpleCharStream stream, int lexState) {
   this(stream);
   SwitchTo(lexState);
}
/** Reinitialise parser. Resets match bookkeeping, lexical state, and rounds. */
public void ReInit(SimpleCharStream stream)
{
   jjmatchedPos = jjnewStateCnt = 0;
   curLexState = defaultLexState;
   input_stream = stream;
   ReInitRounds();
}
// Restarts round numbering and marks every NFA state as not yet visited in
// the current round (0x80000000 can never equal a live jjround value).
private void ReInitRounds()
{
   jjround = 0x80000001;
   for (int state = 0; state < 31; state++)
      jjrounds[state] = 0x80000000;
}
/** Reinitialise parser and select the lexical state to resume in. */
public void ReInit(SimpleCharStream stream, int lexState)
{
   ReInit(stream);
   SwitchTo(lexState);
}
/** Switch to specified lex state. Valid states are 0..3 (see lexStateNames). */
public void SwitchTo(int lexState)
{
   if (lexState >= 4 || lexState < 0)
      throw new TokenMgrError("Error: Ignoring invalid lexical state : " + lexState + ". State unchanged.", TokenMgrError.INVALID_LEXICAL_STATE);
   curLexState = lexState;
}
// Builds a Token for the most recent match. Kinds with a fixed literal use
// that literal as the image; variable-image kinds take the matched text
// from the char stream. The token also carries its source span.
protected Token jjFillToken()
{
   String literal = jjstrLiteralImages[jjmatchedKind];
   String image = (literal == null) ? input_stream.GetImage() : literal;
   int startLine = input_stream.getBeginLine();
   int startColumn = input_stream.getBeginColumn();
   int stopLine = input_stream.getEndLine();
   int stopColumn = input_stream.getEndColumn();
   Token token = Token.newToken(jjmatchedKind, image);
   token.beginLine = startLine;
   token.endLine = stopLine;
   token.beginColumn = startColumn;
   token.endColumn = stopColumn;
   return token;
}
int curLexState = 0;
int defaultLexState = 0;
int jjnewStateCnt;
int jjround;
int jjmatchedPos;
int jjmatchedKind;
/** Get the next Token. */
public Token getNextToken()
{
Token matchedToken;
int curPos = 0;
EOFLoop:
for (;;)
{
try
{
curChar = input_stream.BeginToken();
}
catch (java.io.IOException e)
{
jjmatchedKind = 0;
matchedToken = jjFillToken();
return matchedToken;
}
for (;;)
{
switch (curLexState)
{
case 0:
try {
input_stream.backup(0);
while (curChar <= 32 && (0x100002600L & (1L << curChar)) != 0L)
curChar = input_stream.BeginToken();
}
catch (java.io.IOException e1) { continue EOFLoop; }
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_0();
break;
case 1:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_1();
break;
case 2:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_2();
break;
case 3:
jjmatchedKind = 0x7fffffff;
jjmatchedPos = 0;
curPos = jjMoveStringLiteralDfa0_3();
break;
}
if (jjmatchedKind != 0x7fffffff)
{
if (jjmatchedPos + 1 < curPos)
input_stream.backup(curPos - jjmatchedPos - 1);
if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
{
matchedToken = jjFillToken();
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
return matchedToken;
}
else if ((jjtoSkip[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L)
{
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
continue EOFLoop;
}
if (jjnewLexState[jjmatchedKind] != -1)
curLexState = jjnewLexState[jjmatchedKind];
curPos = 0;
jjmatchedKind = 0x7fffffff;
try {
curChar = input_stream.readChar();
continue;
}
catch (java.io.IOException e1) { /*ignored*/ }
}
int error_line = input_stream.getEndLine();
int error_column = input_stream.getEndColumn();
String error_after = null;
boolean EOFSeen = false;
try { input_stream.readChar(); input_stream.backup(1); }
catch (java.io.IOException e1) {
EOFSeen = true;
error_after = curPos <= 1 ? "" : input_stream.GetImage();
if (curChar == '\n' || curChar == '\r') {
error_line++;
error_column = 0;
}
else
error_column++;
}
if (!EOFSeen) {
input_stream.backup(1);
error_after = curPos <= 1 ? "" : input_stream.GetImage();
}
throw new TokenMgrError(EOFSeen, curLexState, error_line, error_column, error_after, curChar, TokenMgrError.LEXICAL_ERROR);
}
}
}
private void jjCheckNAdd(int state)
{
if (jjrounds[state] != jjround)
{
jjstateSet[jjnewStateCnt++] = state;
jjrounds[state] = jjround;
}
}
private void jjAddStates(int start, int end)
{
do {
jjstateSet[jjnewStateCnt++] = jjnextStates[start];
} while (start++ != end);
}
private void jjCheckNAddTwoStates(int state1, int state2)
{
jjCheckNAdd(state1);
jjCheckNAdd(state2);
}
private void jjCheckNAddStates(int start, int end)
{
do {
jjCheckNAdd(jjnextStates[start]);
} while (start++ != end);
}
}
|
googleapis/google-cloud-java | 36,202 | java-securitycenter/proto-google-cloud-securitycenter-v2/src/main/java/com/google/cloud/securitycenter/v2/Cwe.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v2/vulnerability.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v2;
/**
*
*
* <pre>
* CWE stands for Common Weakness Enumeration. Information about this weakness,
* as described by [CWE](https://cwe.mitre.org/).
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v2.Cwe}
*/
public final class Cwe extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.Cwe)
CweOrBuilder {
private static final long serialVersionUID = 0L;
// Use Cwe.newBuilder() to construct.
private Cwe(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Cwe() {
id_ = "";
references_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Cwe();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v2.VulnerabilityProto
.internal_static_google_cloud_securitycenter_v2_Cwe_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v2.VulnerabilityProto
.internal_static_google_cloud_securitycenter_v2_Cwe_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v2.Cwe.class,
com.google.cloud.securitycenter.v2.Cwe.Builder.class);
}
public static final int ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object id_ = "";
/**
*
*
* <pre>
* The CWE identifier, e.g. CWE-94
* </pre>
*
* <code>string id = 1;</code>
*
* @return The id.
*/
@java.lang.Override
public java.lang.String getId() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
}
}
/**
*
*
* <pre>
* The CWE identifier, e.g. CWE-94
* </pre>
*
* <code>string id = 1;</code>
*
* @return The bytes for id.
*/
@java.lang.Override
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REFERENCES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.securitycenter.v2.Reference> references_;
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.securitycenter.v2.Reference> getReferencesList() {
return references_;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.securitycenter.v2.ReferenceOrBuilder>
getReferencesOrBuilderList() {
return references_;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
@java.lang.Override
public int getReferencesCount() {
return references_.size();
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
@java.lang.Override
public com.google.cloud.securitycenter.v2.Reference getReferences(int index) {
return references_.get(index);
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
@java.lang.Override
public com.google.cloud.securitycenter.v2.ReferenceOrBuilder getReferencesOrBuilder(int index) {
return references_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
}
for (int i = 0; i < references_.size(); i++) {
output.writeMessage(2, references_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
}
for (int i = 0; i < references_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, references_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.securitycenter.v2.Cwe)) {
return super.equals(obj);
}
com.google.cloud.securitycenter.v2.Cwe other = (com.google.cloud.securitycenter.v2.Cwe) obj;
if (!getId().equals(other.getId())) return false;
if (!getReferencesList().equals(other.getReferencesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ID_FIELD_NUMBER;
hash = (53 * hash) + getId().hashCode();
if (getReferencesCount() > 0) {
hash = (37 * hash) + REFERENCES_FIELD_NUMBER;
hash = (53 * hash) + getReferencesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycenter.v2.Cwe parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v2.Cwe parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securitycenter.v2.Cwe parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.securitycenter.v2.Cwe prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* CWE stands for Common Weakness Enumeration. Information about this weakness,
* as described by [CWE](https://cwe.mitre.org/).
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v2.Cwe}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.Cwe)
com.google.cloud.securitycenter.v2.CweOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v2.VulnerabilityProto
.internal_static_google_cloud_securitycenter_v2_Cwe_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v2.VulnerabilityProto
.internal_static_google_cloud_securitycenter_v2_Cwe_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v2.Cwe.class,
com.google.cloud.securitycenter.v2.Cwe.Builder.class);
}
// Construct using com.google.cloud.securitycenter.v2.Cwe.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
id_ = "";
if (referencesBuilder_ == null) {
references_ = java.util.Collections.emptyList();
} else {
references_ = null;
referencesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securitycenter.v2.VulnerabilityProto
.internal_static_google_cloud_securitycenter_v2_Cwe_descriptor;
}
@java.lang.Override
public com.google.cloud.securitycenter.v2.Cwe getDefaultInstanceForType() {
return com.google.cloud.securitycenter.v2.Cwe.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securitycenter.v2.Cwe build() {
com.google.cloud.securitycenter.v2.Cwe result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.securitycenter.v2.Cwe buildPartial() {
com.google.cloud.securitycenter.v2.Cwe result =
new com.google.cloud.securitycenter.v2.Cwe(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.cloud.securitycenter.v2.Cwe result) {
if (referencesBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
references_ = java.util.Collections.unmodifiableList(references_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.references_ = references_;
} else {
result.references_ = referencesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.securitycenter.v2.Cwe result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.id_ = id_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securitycenter.v2.Cwe) {
return mergeFrom((com.google.cloud.securitycenter.v2.Cwe) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.securitycenter.v2.Cwe other) {
if (other == com.google.cloud.securitycenter.v2.Cwe.getDefaultInstance()) return this;
if (!other.getId().isEmpty()) {
id_ = other.id_;
bitField0_ |= 0x00000001;
onChanged();
}
if (referencesBuilder_ == null) {
if (!other.references_.isEmpty()) {
if (references_.isEmpty()) {
references_ = other.references_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureReferencesIsMutable();
references_.addAll(other.references_);
}
onChanged();
}
} else {
if (!other.references_.isEmpty()) {
if (referencesBuilder_.isEmpty()) {
referencesBuilder_.dispose();
referencesBuilder_ = null;
references_ = other.references_;
bitField0_ = (bitField0_ & ~0x00000002);
referencesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getReferencesFieldBuilder()
: null;
} else {
referencesBuilder_.addAllMessages(other.references_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
id_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
com.google.cloud.securitycenter.v2.Reference m =
input.readMessage(
com.google.cloud.securitycenter.v2.Reference.parser(), extensionRegistry);
if (referencesBuilder_ == null) {
ensureReferencesIsMutable();
references_.add(m);
} else {
referencesBuilder_.addMessage(m);
}
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object id_ = "";
/**
*
*
* <pre>
* The CWE identifier, e.g. CWE-94
* </pre>
*
* <code>string id = 1;</code>
*
* @return The id.
*/
public java.lang.String getId() {
java.lang.Object ref = id_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The CWE identifier, e.g. CWE-94
* </pre>
*
* <code>string id = 1;</code>
*
* @return The bytes for id.
*/
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The CWE identifier, e.g. CWE-94
* </pre>
*
* <code>string id = 1;</code>
*
* @param value The id to set.
* @return This builder for chaining.
*/
public Builder setId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The CWE identifier, e.g. CWE-94
* </pre>
*
* <code>string id = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearId() {
id_ = getDefaultInstance().getId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The CWE identifier, e.g. CWE-94
* </pre>
*
* <code>string id = 1;</code>
*
* @param value The bytes for id to set.
* @return This builder for chaining.
*/
public Builder setIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.util.List<com.google.cloud.securitycenter.v2.Reference> references_ =
java.util.Collections.emptyList();
private void ensureReferencesIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
references_ =
new java.util.ArrayList<com.google.cloud.securitycenter.v2.Reference>(references_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securitycenter.v2.Reference,
com.google.cloud.securitycenter.v2.Reference.Builder,
com.google.cloud.securitycenter.v2.ReferenceOrBuilder>
referencesBuilder_;
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public java.util.List<com.google.cloud.securitycenter.v2.Reference> getReferencesList() {
if (referencesBuilder_ == null) {
return java.util.Collections.unmodifiableList(references_);
} else {
return referencesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public int getReferencesCount() {
if (referencesBuilder_ == null) {
return references_.size();
} else {
return referencesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public com.google.cloud.securitycenter.v2.Reference getReferences(int index) {
if (referencesBuilder_ == null) {
return references_.get(index);
} else {
return referencesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder setReferences(int index, com.google.cloud.securitycenter.v2.Reference value) {
if (referencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReferencesIsMutable();
references_.set(index, value);
onChanged();
} else {
referencesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder setReferences(
int index, com.google.cloud.securitycenter.v2.Reference.Builder builderForValue) {
if (referencesBuilder_ == null) {
ensureReferencesIsMutable();
references_.set(index, builderForValue.build());
onChanged();
} else {
referencesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder addReferences(com.google.cloud.securitycenter.v2.Reference value) {
if (referencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReferencesIsMutable();
references_.add(value);
onChanged();
} else {
referencesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder addReferences(int index, com.google.cloud.securitycenter.v2.Reference value) {
if (referencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReferencesIsMutable();
references_.add(index, value);
onChanged();
} else {
referencesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder addReferences(
com.google.cloud.securitycenter.v2.Reference.Builder builderForValue) {
if (referencesBuilder_ == null) {
ensureReferencesIsMutable();
references_.add(builderForValue.build());
onChanged();
} else {
referencesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder addReferences(
int index, com.google.cloud.securitycenter.v2.Reference.Builder builderForValue) {
if (referencesBuilder_ == null) {
ensureReferencesIsMutable();
references_.add(index, builderForValue.build());
onChanged();
} else {
referencesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder addAllReferences(
java.lang.Iterable<? extends com.google.cloud.securitycenter.v2.Reference> values) {
if (referencesBuilder_ == null) {
ensureReferencesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, references_);
onChanged();
} else {
referencesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder clearReferences() {
if (referencesBuilder_ == null) {
references_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
referencesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public Builder removeReferences(int index) {
if (referencesBuilder_ == null) {
ensureReferencesIsMutable();
references_.remove(index);
onChanged();
} else {
referencesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public com.google.cloud.securitycenter.v2.Reference.Builder getReferencesBuilder(int index) {
return getReferencesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public com.google.cloud.securitycenter.v2.ReferenceOrBuilder getReferencesOrBuilder(int index) {
if (referencesBuilder_ == null) {
return references_.get(index);
} else {
return referencesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public java.util.List<? extends com.google.cloud.securitycenter.v2.ReferenceOrBuilder>
getReferencesOrBuilderList() {
if (referencesBuilder_ != null) {
return referencesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(references_);
}
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public com.google.cloud.securitycenter.v2.Reference.Builder addReferencesBuilder() {
return getReferencesFieldBuilder()
.addBuilder(com.google.cloud.securitycenter.v2.Reference.getDefaultInstance());
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public com.google.cloud.securitycenter.v2.Reference.Builder addReferencesBuilder(int index) {
return getReferencesFieldBuilder()
.addBuilder(index, com.google.cloud.securitycenter.v2.Reference.getDefaultInstance());
}
/**
*
*
* <pre>
* Any reference to the details on the CWE, for example,
* https://cwe.mitre.org/data/definitions/94.html
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v2.Reference references = 2;</code>
*/
public java.util.List<com.google.cloud.securitycenter.v2.Reference.Builder>
getReferencesBuilderList() {
return getReferencesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securitycenter.v2.Reference,
com.google.cloud.securitycenter.v2.Reference.Builder,
com.google.cloud.securitycenter.v2.ReferenceOrBuilder>
getReferencesFieldBuilder() {
if (referencesBuilder_ == null) {
referencesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securitycenter.v2.Reference,
com.google.cloud.securitycenter.v2.Reference.Builder,
com.google.cloud.securitycenter.v2.ReferenceOrBuilder>(
references_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
references_ = null;
}
return referencesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.Cwe)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.Cwe)
  // Generated singleton default instance of the Cwe message.
  private static final com.google.cloud.securitycenter.v2.Cwe DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v2.Cwe();
  }

  public static com.google.cloud.securitycenter.v2.Cwe getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Generated singleton parser. parsePartialFrom preserves whatever was decoded
  // before a failure via setUnfinishedMessage, so callers can inspect the
  // partially built message attached to the thrown exception.
  private static final com.google.protobuf.Parser<Cwe> PARSER =
      new com.google.protobuf.AbstractParser<Cwe>() {
        @java.lang.Override
        public Cwe parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<Cwe> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Cwe> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securitycenter.v2.Cwe getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/derby | 36,349 | java/org.apache.derby.client/org/apache/derby/client/am/ClientClob.java | /*
Derby - Class org.apache.derby.client.am.ClientClob
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.client.am;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.Writer;
import java.nio.charset.Charset;
import java.sql.Clob;
import java.sql.SQLException;
import org.apache.derby.client.net.EncodedInputStream;
import org.apache.derby.shared.common.reference.SQLState;
/**
* This class implements the JDBC {@code java.sql.Clob} interface.
*/
public class ClientClob extends Lob implements Clob {
    //---------------------navigational members-----------------------------------

    //-----------------------------state------------------------------------------

    // Materialized CLOB value; null while the value is stream-backed.
    protected String string_ = null;

    // Only used for input purposes. For output, each getXXXStream call
    // must generate an independent stream.
    private InputStream asciiStream_ = null;
    private InputStream unicodeStream_ = null;
    private Reader characterStream_ = null;

    // used for input
    // Therefore, we always convert a String to UTF-8 before we flow it for input
    // Lazily populated UTF-8 encoding of string_; see getUTF8Length().
    private byte[] utf8String_;
    //---------------------constructors/finalizer---------------------------------

    /** Input CTOR: wraps an already materialized String value. */
    public ClientClob(Agent agent, String string) {
        this(agent, false);
        string_ = string;
        setSqlLength(string_.length());
        dataType_ |= STRING;
    }

    // CTOR for output, when a btc isn't available; the encoding is
    /**
     * Output CTOR: converts server bytes (from dataOffset to the end) into a
     * String using the supplied charset.
     */
    public ClientClob(Agent agent,
                      byte[] unconvertedBytes,
                      Charset charset,
                      int dataOffset) throws SqlException {
        this(agent, false);

        // check for null encoding is needed because the net layer
        // will no longer throw an exception if the server didn't specify
        // a mixed or double byte ccsid (ccsid = 0). this check for null in the
        // cursor is only required for types which can have mixed or double
        // byte ccsids.
        if (charset == null) {
            throw new SqlException(agent.logWriter_,
                new ClientMessageId(SQLState.CHARACTER_CONVERTER_NOT_AVAILABLE));
        }

        string_ = new String(unconvertedBytes,
                             dataOffset,
                             unconvertedBytes.length - dataOffset,
                             charset);
        setSqlLength(string_.length());
        dataType_ |= STRING;
    }

    // CTOR for ascii/unicode stream input
    //"ISO-8859-1", "UTF-8", or "UnicodeBigUnmarked"
    public ClientClob(Agent agent,
                      InputStream inputStream,
                      Charset encoding,
                      int length) {
        this(agent, false);
        setSqlLength(length);

        if (encoding.equals(Cursor.ISO_8859_1)) {
            asciiStream_ = inputStream;
            dataType_ |= ASCII_STREAM;
        } else if (encoding.equals(Cursor.UTF_8)) {
            unicodeStream_ = inputStream;
            dataType_ |= UNICODE_STREAM;
        } else if (encoding.equals(Cursor.UTF_16BE)) {
            characterStream_ =
                new InputStreamReader(inputStream, Cursor.UTF_16BE);
            dataType_ |= CHARACTER_STREAM;
            // UTF-16BE carries two bytes per character, so halve the length.
            setSqlLength(length / 2);
        }
    }
    /**
     * Create a <code>ClientClob</code> of unknown length with the specified
     * encoding.
     *
     * This constructor was added to support the JDBC 4 length less overloads.
     * Note that a <code>ClientClob</code> created with this constructor is
     * made for input to the database only. Do not pass it out to the user!
     *
     * @param agent context for this <code>Clob</code> object
     * @param inputStream the data to insert
     * @param encoding encoding to use for characters. Only "ISO-8859-1" is
     *      allowed.
     */
    ClientClob(Agent agent, InputStream inputStream, Charset encoding)
            throws SqlException {
        this(agent, isLayerBStreamingPossible(agent));

        if (encoding.equals(Cursor.ISO_8859_1)) {
            asciiStream_ = inputStream;
            dataType_ |= ASCII_STREAM;
        } else {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.UNSUPPORTED_ENCODING),
                encoding + " InputStream", "String/Clob");
        }
    }

    // CTOR for character stream input
    // THE ENCODING IS ASSUMED TO BE "UTF-16BE"
    ClientClob(Agent agent, Reader reader, int length) {
        this(agent, false);
        setSqlLength(length);
        characterStream_ = reader;
        dataType_ |= CHARACTER_STREAM;
    }

    /**
     * Create a <code>ClientClob</code> object for a Clob value stored
     * on the server and identified by <code>locator</code>.
     *
     * @param agent context for this <code>Clob</code>
     *      object (incl. connection).
     * @param locator reference id to <code>Clob</code> value on server.
     */
    public ClientClob(Agent agent, int locator) {
        super(agent, false);
        locator_ = locator;
        dataType_ |= LOCATOR;
    }

    /**
     * Create a <code>ClientClob</code> of unknown length.
     *
     * This constructor was added to support the JDBC 4 length less overloads.
     * Note that a <code>ClientClob</code> created with this constructor is
     * made for input to the database only. Do not pass it out to the user!
     *
     * @param agent context for this <code>Clob</code> object
     * @param reader the data to insert
     */
    ClientClob(Agent agent, Reader reader) {
        this(agent, isLayerBStreamingPossible(agent));

        // Wrap reader in stream to share code.
        unicodeStream_ = EncodedInputStream.createUTF8Stream(reader);
        // Override type to share logic with the other stream types.
        dataType_ |= UNICODE_STREAM;
    }

    /** Common initializer delegating to the Lob superclass. */
    private ClientClob(Agent agent, boolean willBeLayerBStreamed) {
        super(agent, willBeLayerBStreamed);
    }
// ---------------------------jdbc 2------------------------------------------
// Create another method lengthX for internal calls
public long length() throws SQLException {
//call checkValidity to exit by throwing a SQLException if
//the Clob object has been freed by calling free() on it
checkValidity();
try
{
synchronized (agent_.connection_) {
if (agent_.loggingEnabled()) {
agent_.logWriter_.traceEntry(this, "length");
}
long length = super.sqlLength();
if (agent_.loggingEnabled()) {
agent_.logWriter_.traceExit(this, "length", length);
}
return length;
}
}
catch ( SqlException se )
{
throw se.getSQLException();
}
}
    /**
     * Returns a copy of the specified substring
     * in the <code>CLOB</code> value
     * designated by this <code>ClientClob</code> object.
     * The substring begins at position
     * <code>pos</code> and has up to <code>length</code> consecutive
     * characters. The starting position must be between 1 and the length
     * of the CLOB plus 1. This allows for zero-length CLOB values, from
     * which only zero-length substrings can be returned.
     * If a larger length is requested than there are characters available,
     * characters to the end of the CLOB are returned.
     * @param pos the first character of the substring to be extracted.
     *            The first character is at position 1.
     * @param length the number of consecutive characters to be copied
     * @return a <code>String</code> that is the specified substring in the
     *         <code>CLOB</code> value designated by this
     *         <code>ClientClob</code> object
     * @exception SQLException if there is an error accessing the
     *            <code>CLOB</code>
     * NOTE: If the starting position is the length of the CLOB plus 1,
     * zero characters are returned regardless of the length requested.
     */
    public String getSubString(long pos, int length) throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                String retVal = null;

                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this, "getSubString", (int) pos, length);
                }

                // Validate: pos is 1-based, length is non-negative, and pos may
                // be at most sqlLength() + 1 (a zero-length tail read is legal).
                if (pos <= 0) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_BAD_POSITION), pos);
                }

                if (length < 0) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_NONPOSITIVE_LENGTH),
                        length);
                }

                if (pos > sqlLength() + 1) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_POSITION_TOO_LARGE),
                        pos);
                }

                retVal = getSubStringX(pos, length);

                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "getSubString", retVal);
                }
                return retVal;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }
private String getSubStringX(long pos, int length) throws SqlException
{
checkForClosedConnection();
// actual length is the lesser of the length requested
// and the number of characters available from pos to the end
long actualLength = Math.min(this.sqlLength() - pos + 1, (long) length);
//Check to see if the Clob object is locator enabled.
if (isLocator()) {
//The Clob object is locator enabled. Hence call the stored
//procedure CLOBGETLENGTH to determine the length of the Clob.
return agent_.connection_.locatorProcedureCall()
.clobGetSubString(locator_, pos, (int)actualLength);
}
else {
//The Clob object is not locator enabled.
return string_.substring
((int) pos - 1, (int) (pos - 1 + actualLength));
}
}
    /**
     * Returns a Reader over the whole CLOB value, whatever its backing
     * representation (locator, input character stream, or String).
     */
    public Reader getCharacterStream() throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this, "getCharacterStream");
                }
                Reader retVal = getCharacterStreamX();
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "getCharacterStream", retVal);
                }
                return retVal;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }

    Reader getCharacterStreamX() throws SqlException {
        checkForClosedConnection();

        // check is this Lob is locator enabled
        if (isLocator()) {
            // The Lob is locator enabled. Return an instance of the
            // update sensitive Reader that wraps inside it a
            // Buffered Locator Reader. The wrapper class
            // watches out for updates.
            return new UpdateSensitiveClobLocatorReader(agent_.connection_, this);
        } else if (isCharacterStream()) // this Lob is used for input
        {
            return characterStream_;
        }

        // Materialized value: read straight from the String.
        return new StringReader(string_);
    }
    /**
     * Returns an ASCII InputStream over the CLOB value.
     */
    public InputStream getAsciiStream() throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this, "getAsciiStream");
                }
                InputStream retVal = getAsciiStreamX();
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "getAsciiStream", retVal);
                }
                return retVal;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }

    InputStream getAsciiStreamX() throws SqlException {
        checkForClosedConnection();

        if (isAsciiStream()) // this Lob is used for input
        {
            return asciiStream_;
        } else if (isLocator()) { // Check to see if this Lob is locator enabled
            // The Lob is locator enabled. Return an instance
            // of the update sensitive wrappers that wrap inside
            // it a Buffered Locator enabled InputStream. The
            // wrapper watches out for updates to the underlying
            // Clob.
            return new UpdateSensitiveClobLocatorInputStream(agent_.connection_, this);
        } else {
            // Materialized value: adapt the String to an ASCII stream.
            return new AsciiStream(string_, new StringReader(string_));
        }
    }
    /**
     * Returns the 1-based position of the given substring within the CLOB,
     * searching from {@code start} (1-based), or -1 if not found.
     */
    public long position(String searchstr, long start) throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this,
                                                 "position(String, long)",
                                                 searchstr,
                                                 start);
                }
                if (searchstr == null) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_NULL_PATTERN_OR_SEARCH_STR));
                }
                if (start < 1) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_BAD_POSITION),
                        start);
                }

                long pos = positionX(searchstr, start);
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "position(String, long)", pos);
                }
                return pos;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }

    private long positionX(String searchstr, long start) throws SqlException {
        checkForClosedConnection();

        long index = -1;

        // Defensive re-check (the public wrapper already rejected start < 1);
        // note it reports a different SQLState than the wrapper does.
        if (start <= 0) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.INVALID_API_PARAMETER),
                start, "start", "Clob.position()");
        }

        // Check is locator support is available for this LOB.
        if (isLocator()) {
            // Locator support is available. Hence call
            // CLOBGETPOSITIONFROMSTRING to determine the position
            // of the given substring inside the LOB.
            index = agent_.connection_.locatorProcedureCall()
                .clobGetPositionFromString(locator_, searchstr, start);
        } else {
            // Locator support is not available.
            // NOTE(review): (int) start truncates for start > Integer.MAX_VALUE;
            // presumably bounded by the maximum materialized CLOB size — confirm.
            index = string_.indexOf(searchstr, (int) start - 1);
            if (index != -1) {
                index++; // api index starts at 1
            }
        }
        return index;
    }
    /**
     * Returns the 1-based position of the given Clob's value within this CLOB,
     * searching from {@code start} (1-based), or -1 if not found.
     */
    public long position(Clob searchstr, long start) throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this,
                                                 "position(Clob, long)",
                                                 searchstr,
                                                 start);
                }
                if (start < 1) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_BAD_POSITION), start);
                }

                if (searchstr == null) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_NULL_PATTERN_OR_SEARCH_STR));
                }
                long pos = positionX(searchstr, start);
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "position(Clob, long)", pos);
                }
                return pos;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }

    private long positionX(Clob searchstr, long start) throws SqlException {
        checkForClosedConnection();

        if (start <= 0) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.INVALID_API_PARAMETER),
                start, "start", "Clob.position()");
        }

        long index;
        try {
            // if the searchstr is longer than the source, no match
            if (searchstr.length() > sqlLength()) {
                return -1;
            }

            // Check if locator support is available for this LOB.
            if (isLocator()) {
                // Locator support is available. Hence call
                // CLOBGETPOSITIONFROMLOCATOR to determine the position
                // of the given Clob inside the LOB.
                index = agent_.connection_.locatorProcedureCall()
                    .clobGetPositionFromLocator(locator_,
                        ((ClientClob) searchstr).getLocator(),
                        start);
            } else {
                // Locator support is not available: materialize the pattern
                // and search the local String value.
                index = string_.indexOf(searchstr.getSubString(1L,
                                            (int) searchstr.length()),
                                        (int) start - 1);
                // increase the index by one since String positions are
                // 0-based and Clob positions are 1-based
                if (index != -1) {
                    index++;
                }
            }
        } catch (SQLException e) {
            throw new SqlException(e);
        }
        return index;
    }
//---------------------------- jdbc 3.0 -----------------------------------
public int setString(long pos, String str) throws SQLException {
//call checkValidity to exit by throwing a SQLException if
//the Clob object has been freed by calling free() on it
checkValidity();
try
{
synchronized (agent_.connection_) {
if (agent_.loggingEnabled()) {
agent_.logWriter_.traceEntry(this, "setString", (int) pos, str);
}
int length = setStringX(pos, str, 0, str.length());
if (agent_.loggingEnabled()) {
agent_.logWriter_.traceExit(this, "setString", length);
}
return length;
}
}
catch ( SqlException se )
{
throw se.getSQLException();
}
}
    /**
     * Writes {@code len} characters of {@code str}, starting at
     * {@code offset}, into the CLOB at 1-based position {@code pos}.
     */
    public int setString(long pos, String str, int offset, int len) throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this, "setString", (int) pos, str, offset, len);
                }
                int length = setStringX(pos, str, offset, len);
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "setString", length);
                }
                return length;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }

    /**
     * Shared implementation for the setString overloads. Validates all
     * arguments, then writes either through the CLOBSETSTRING stored
     * procedure (locator path) or by rebuilding the materialized String.
     *
     * @return the number of characters actually written
     */
    int setStringX(long pos, String str, int offset, int len)
            throws SqlException {
        // pos is 1-based and may be at most one past the current end.
        if ((int) pos <= 0) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.BLOB_BAD_POSITION), pos);
        }

        if (pos - 1 > sqlLength()) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.BLOB_POSITION_TOO_LARGE), pos);
        }

        if (str == null) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(
                    SQLState.BLOB_NULL_PATTERN_OR_SEARCH_STR));
        }

        if (str.length() == 0) {
            return 0;
        }

        if ((offset < 0) || offset >= str.length()) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.BLOB_INVALID_OFFSET), offset);
        }

        if (len < 0) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(SQLState.BLOB_NONPOSITIVE_LENGTH), len);
        }

        if (offset + len > str.length()) {
            throw new SqlException(agent_.logWriter_,
                new ClientMessageId(
                    SQLState.LANG_SUBSTR_START_ADDING_LEN_OUT_OF_RANGE),
                offset, len, str);
        }

        if (len == 0) {
            return 0;
        }

        int length = 0;
        length = Math.min((str.length() - offset), len);

        // check if the Clob object is locator enabled
        if (isLocator()) {
            // The Clob is locator enabled. Call the CLOBSETSTRING
            // stored procedure to set the given string in the Clob.
            agent_.connection_.locatorProcedureCall().clobSetString
                (locator_, pos, length, str.substring(offset, offset + length));
            if (pos + length - 1 > sqlLength()) { // Wrote beyond the old end
                // Update length
                setSqlLength(pos + length - 1);
            }
            // The Clob value has been updated. Increment the update count.
            incrementUpdateCount();
        } else {
            // The Clob is not locator enabled.
            // NOTE(review): this rebuild drops any suffix of the old value
            // beyond the written region — presumably intentional for this
            // legacy (non-locator) path; confirm against server semantics.
            reInitForNonLocator(
                string_.substring(0, (int) pos - 1)
                    .concat(str.substring(offset, offset + length)));
        }
        return length;
    }
    /**
     * Returns an OutputStream that writes ASCII characters into the CLOB
     * starting at 1-based position {@code pos}.
     */
    public OutputStream setAsciiStream(long pos) throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this, "setAsciiStream", (int) pos);
                }
                OutputStream outStream = null;

                if (isLocator()) { // Check to see if the Lob is locator enabled
                    // The Lob is locator enabled. Return an instance of the
                    // Locator enabled Clob specific OutputStream implementation.
                    outStream = new ClobLocatorOutputStream(this, pos);
                } else {
                    // The Lob is not locator enabled.
                    outStream = new ClobOutputStream(this, pos);
                }

                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "setAsciiStream", outStream);
                }
                return outStream;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }

    /**
     * Returns a Writer that writes characters into the CLOB starting at
     * 1-based position {@code pos}.
     */
    public Writer setCharacterStream(long pos) throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this, "setCharacterStream", (int) pos);
                }
                Writer writer = null;
                // Check to see if this Clob is locator enabled.
                if (isLocator()) {
                    // return an instance of the locator enabled implementation
                    // of the writer interface
                    writer = new ClobLocatorWriter(agent_.connection_, this, pos);
                } else { // The Lob is not locator enabled.
                    writer = new ClobWriter(this, pos);
                }
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceExit(this, "setCharacterStream", writer);
                }
                return writer;
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }
    /**
     * Truncates the CLOB value to {@code len} characters.
     *
     * @param len new length; must be between 0 and the current length
     * @throws SQLException if len is out of range or the Clob has been freed
     */
    public void truncate(long len) throws SQLException {
        // Throws if free() has already been called on this Clob.
        checkValidity();
        try {
            synchronized (agent_.connection_) {
                if (agent_.loggingEnabled()) {
                    agent_.logWriter_.traceEntry(this, " truncate", (int) len);
                }
                if (len < 0) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_NONPOSITIVE_LENGTH),
                        len);
                }

                if (len > sqlLength()) {
                    throw new SqlException(agent_.logWriter_,
                        new ClientMessageId(SQLState.BLOB_LENGTH_TOO_LONG),
                        len);
                }

                // Truncating to the current length is a no-op.
                if (len == sqlLength()) {
                    return;
                }

                // check whether the Lob is locator enabled.
                if (isLocator()) {
                    // The Lob is locator enabled then call the stored
                    // procedure CLOBTRUNCATE to truncate this Lob.
                    agent_.connection_.locatorProcedureCall().
                        clobTruncate(locator_, len);

                    // The Clob value has been modified.
                    // Increment the update count and update the length.
                    incrementUpdateCount();
                    setSqlLength(len);
                } else {
                    // The Lob is not locator enabled.
                    reInitForNonLocator(string_.substring(0, (int) len));
                }
            }
        } catch (SqlException se) {
            throw se.getSQLException();
        }
    }
//---------------------------- jdbc 4.0 -------------------------------------
/**
* This method frees the <code>Clob</code> object and releases the resources the resources
* that it holds. The object is invalid once the <code>free</code> method
* is called. If <code>free</code> is called multiple times, the
* subsequent calls to <code>free</code> are treated as a no-op.
*
* @throws SQLException if an error occurs releasing
* the Clob's resources
*/
public void free()
throws SQLException {
//calling free() on a already freed object is treated as a no-op
if (!isValid_) return;
//now that free has been called the Blob object is no longer
//valid
isValid_ = false;
try {
synchronized (agent_.connection_) {
if (agent_.loggingEnabled()) {
agent_.logWriter_.traceEntry(this, "free");
}
if (isLocator()) {
agent_.connection_.locatorProcedureCall()
.clobReleaseLocator(locator_);
}
}
} catch (SqlException se) {
throw se.getSQLException();
}
if(isString()) {
string_ = null;
utf8String_ = null;
}
if(isAsciiStream()) {
try {
asciiStream_.close();
}
catch(IOException ioe) {
throw new SqlException(null, new ClientMessageId(SQLState.IO_ERROR_UPON_LOB_FREE)).getSQLException();
}
}
if(isUnicodeStream()) {
try {
unicodeStream_.close();
}
catch(IOException ioe) {
throw new SqlException(null, new ClientMessageId(SQLState.IO_ERROR_UPON_LOB_FREE)).getSQLException();
}
}
if(isCharacterStream()) {
try {
characterStream_.close();
}
catch(IOException ioe) {
throw new SqlException(null, new ClientMessageId(SQLState.IO_ERROR_UPON_LOB_FREE)).getSQLException();
}
}
}
/**
* Returns a <code>Reader</code> object that contains a partial
* <code>Clob</code> value, starting with the character specified by pos,
* which is length characters in length.
*
* @param pos the offset to the first character of the partial value to
* be retrieved. The first character in the Clob is at position 1.
* @param length the length in characters of the partial value to be
* retrieved.
* @return <code>Reader</code> through which the partial <code>Clob</code>
* value can be read.
* @throws SQLException if pos is less than 1 or if pos is greater than the
* number of
* characters in the {@code Clob} or if {@code pos + length} is greater than
* {@code Clob.length() +1}
*/
public Reader getCharacterStream(long pos, long length)
throws SQLException {
//call checkValidity to exit by throwing a SQLException if
//the Clob object has been freed by calling free() on it
checkValidity();
synchronized (agent_.connection_) {
if (agent_.loggingEnabled()) {
agent_.logWriter_.traceEntry(this, "getCharacterStream",
(int) pos, length);
}
checkPosAndLength(pos, length);
Reader retVal = null;
//check if the Lob is locator enabled.
if(isLocator()) {
//1) The Lob is locator enabled. Return the update
// sensitive wrapper that wraps inside it a
// locator enabled Clob Reader. The wrapper
// watches out for updates to the underlying
// Clob.
//2) len is the number of characters in the
// stream starting from pos.
//3) checkPosAndLength will ensure that pos and
// length fall within the boundaries of the
// Clob object.
try {
retVal = new UpdateSensitiveClobLocatorReader
(agent_.connection_, this,
pos, length);
}
catch(SqlException sqle) {
throw sqle.getSQLException();
}
}
else {
//The Lob is not locator enabled.
String retVal_str = null;
try {
retVal_str = getSubStringX(pos, (int)length);
}
catch(SqlException sqle) {
throw sqle.getSQLException();
}
retVal = new StringReader(retVal_str);
if (agent_.loggingEnabled()) {
agent_.logWriter_.traceExit(this, "getCharacterStream", retVal);
}
}
return retVal;
}
}
    //----------------------------helper methods----------------------------------

    /** @return true if this Clob holds a materialized String value. */
    public boolean isString() {
        return ((dataType_ & STRING) == STRING);
    }

    /** @return true if this Clob is backed by an ASCII input stream. */
    public boolean isAsciiStream() {
        return ((dataType_ & ASCII_STREAM) == ASCII_STREAM);
    }

    /** @return true if this Clob is backed by a character Reader. */
    public boolean isCharacterStream() {
        return ((dataType_ & CHARACTER_STREAM) == CHARACTER_STREAM);
    }

    /** @return true if this Clob is backed by a UTF-8 input stream. */
    public boolean isUnicodeStream() {
        return ((dataType_ & UNICODE_STREAM) == UNICODE_STREAM);
    }

    /** @return the raw UTF-8 input stream, if any (input use only). */
    public InputStream getUnicodeStream() {
        return unicodeStream_;
    }

    /** @return the materialized String value, or null if stream-backed. */
    public String getString() {
        return string_;
    }

    /** @return the cached UTF-8 bytes, or null if not yet computed. */
    public byte[] getUtf8String() {
        return utf8String_;
    }
// Return the length of the equivalent UTF-8 string
// precondition: string_ is not null and dataType_ includes STRING
public int getUTF8Length() {
if (utf8String_ != null) {
return utf8String_.length;
}
utf8String_ = string_.getBytes(Cursor.UTF_8);
return utf8String_.length;
}
    /**
     * Reinitialize the value of this CLOB.
     *
     * This is legacy code, only used when talking to servers that don't
     * support locators.
     *
     * @param newString the new value
     */
    // The StringBufferInputStream class is deprecated, but we don't care too
    // much since this code is only for talking to very old servers. Suppress
    // the deprecation warnings for now.
    @SuppressWarnings("deprecation")
    void reInitForNonLocator(String newString) {
        string_ = newString;
        // Re-derive every stream view from the new value so subsequent
        // getXXXStream calls observe the update.
        asciiStream_ = new java.io.StringBufferInputStream(string_);
        unicodeStream_ = new java.io.StringBufferInputStream(string_);
        characterStream_ = new StringReader(string_);
        setSqlLength(string_.length());
    }

    /**
     * Materialize the stream used for input to the database.
     *
     * @throws SqlException on error
     */
    protected void materializeStream() throws SqlException {
        unicodeStream_ = super.materializeStream(
            isAsciiStream() ? asciiStream_ : unicodeStream_,
            "java.sql.Clob");
        // dataType_ is assigned (not OR-ed) here, so previous stream flags are
        // dropped after materialization.
        dataType_ = UNICODE_STREAM;
    }

    /*---------------------------------------------------------------------
      Methods used in the locator implementation.
      ----------------------------------------------------------------------*/

    /**
     * Get the length in bytes of the <code>Clob</code> value represented by
     * this locator based <code>Clob</code> object.
     *
     * A stored procedure call will be made to get it from the server.
     *
     * @throws org.apache.derby.client.am.SqlException
     * @return length of <code>Clob</code> in bytes
     */
    long getLocatorLength() throws SqlException {
        return agent_.connection_.locatorProcedureCall()
            .clobGetLength(locator_);
    }
}
|
apache/poi | 36,478 | poi/src/main/java/org/apache/poi/ss/util/PropertyTemplate.java | /* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package org.apache.poi.ss.util;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.SpreadsheetVersion;
import org.apache.poi.ss.usermodel.BorderExtent;
import org.apache.poi.ss.usermodel.BorderStyle;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellPropertyType;
import org.apache.poi.ss.usermodel.IndexedColors;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.util.Removal;
import java.util.EnumMap;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* <p>
* A PropertyTemplate is a template that can be applied to any sheet in
* a project. It contains all the border type and color attributes needed to
* draw all the borders for a single sheet. That template can be applied to any
* sheet in any workbook.
*
* This class requires the full spreadsheet to be in memory, so
* <code>SXSSFWorkbook</code> Spreadsheets are not
* supported. The same PropertyTemplate can, however, be applied to both
* {@link HSSFWorkbook} and <code>XSSFWorkbook</code>
* objects if necessary. Portions of the border that fall outside the max range
* of the {@link Workbook} sheet are ignored.
* </p>
*
* <p>
* This would replace {@link RegionUtil}.
* </p>
*/
public final class PropertyTemplate {
    /**
     * This is a list of cell properties for one shot application to a range of
     * cells at a later time.
     */
    private final Map<CellAddress, EnumMap<CellPropertyType, Object>> _propertyTemplate;

    /**
     * Create an empty PropertyTemplate object.
     */
    public PropertyTemplate() {
        _propertyTemplate = new HashMap<>();
    }
    /**
     * Create a PropertyTemplate object as a copy of another PropertyTemplate.
     * Address keys and per-cell property maps are copied, so later edits to
     * one template do not alter the other (the property values themselves
     * are shared).
     *
     * @param template a PropertyTemplate object
     */
    public PropertyTemplate(PropertyTemplate template) {
        this();
        for (Map.Entry<CellAddress, EnumMap<CellPropertyType, Object>> entry : template.getTemplate().entrySet()) {
            _propertyTemplate.put(new CellAddress(entry.getKey()), cloneCellProperties(entry.getValue()));
        }
    }
    /** @return the live backing map of per-cell properties (not a copy). */
    private Map<CellAddress, EnumMap<CellPropertyType, Object>> getTemplate() {
        return _propertyTemplate;
    }

    /** Shallow-copies a property map; the contained values are shared. */
    private static EnumMap<CellPropertyType, Object> cloneCellProperties(EnumMap<CellPropertyType, Object> properties) {
        return new EnumMap<>(properties);
    }
    /**
     * Draws a group of cell borders for a cell range. The borders are not
     * applied to the cells at this time, just the template is drawn. To apply
     * the drawn borders to a sheet, use {@link #applyBorders}.
     *
     * @param range
     *            - {@link CellRangeAddress} range of cells on which borders are
     *            drawn.
     * @param borderType
     *            - Type of border to draw. {@link BorderStyle}.
     * @param extent
     *            - {@link BorderExtent} of the borders to be
     *            applied.
     */
    public void drawBorders(CellRangeAddress range, BorderStyle borderType,
            BorderExtent extent) {
        // Pure dispatch: each extent maps onto the dedicated drawing helpers.
        switch (extent) {
        case NONE:
            removeBorders(range);
            break;
        // Grids: horizontal and vertical edges together.
        case ALL:
            drawHorizontalBorders(range, borderType, BorderExtent.ALL);
            drawVerticalBorders(range, borderType, BorderExtent.ALL);
            break;
        case INSIDE:
            drawHorizontalBorders(range, borderType, BorderExtent.INSIDE);
            drawVerticalBorders(range, borderType, BorderExtent.INSIDE);
            break;
        case OUTSIDE:
            drawOutsideBorders(range, borderType, BorderExtent.ALL);
            break;
        // Single edges of the range.
        case TOP:
            drawTopBorder(range, borderType);
            break;
        case BOTTOM:
            drawBottomBorder(range, borderType);
            break;
        case LEFT:
            drawLeftBorder(range, borderType);
            break;
        case RIGHT:
            drawRightBorder(range, borderType);
            break;
        // Horizontal-only variants.
        case HORIZONTAL:
            drawHorizontalBorders(range, borderType, BorderExtent.ALL);
            break;
        case INSIDE_HORIZONTAL:
            drawHorizontalBorders(range, borderType, BorderExtent.INSIDE);
            break;
        case OUTSIDE_HORIZONTAL:
            drawOutsideBorders(range, borderType, BorderExtent.HORIZONTAL);
            break;
        // Vertical-only variants.
        case VERTICAL:
            drawVerticalBorders(range, borderType, BorderExtent.ALL);
            break;
        case INSIDE_VERTICAL:
            drawVerticalBorders(range, borderType, BorderExtent.INSIDE);
            break;
        case OUTSIDE_VERTICAL:
            drawOutsideBorders(range, borderType, BorderExtent.VERTICAL);
            break;
        }
    }
/**
* Draws a group of cell borders for a cell range. The borders are not
* applied to the cells at this time, just the template is drawn. To apply
* the drawn borders to a sheet, use {@link #applyBorders}.
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
* @param extent
* - {@link BorderExtent} of the borders to be
* applied.
*/
public void drawBorders(CellRangeAddress range, BorderStyle borderType,
short color, BorderExtent extent) {
drawBorders(range, borderType, extent);
if (borderType != BorderStyle.NONE) {
drawBorderColors(range, color, extent);
}
}
/**
* <p>
* Draws the top border for a range of cells
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
*/
private void drawTopBorder(CellRangeAddress range, BorderStyle borderType) {
int row = range.getFirstRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstCol; i <= lastCol; i++) {
addProperty(row, i, CellPropertyType.BORDER_TOP, borderType);
if (borderType == BorderStyle.NONE && row > 0) {
addProperty(row - 1, i, CellPropertyType.BORDER_BOTTOM, borderType);
}
}
}
/**
* <p>
* Draws the bottom border for a range of cells
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
*/
private void drawBottomBorder(CellRangeAddress range,
BorderStyle borderType) {
int row = range.getLastRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstCol; i <= lastCol; i++) {
addProperty(row, i, CellPropertyType.BORDER_BOTTOM, borderType);
if (borderType == BorderStyle.NONE
&& row < SpreadsheetVersion.EXCEL2007.getMaxRows() - 1) {
addProperty(row + 1, i, CellPropertyType.BORDER_TOP, borderType);
}
}
}
/**
* <p>
* Draws the left border for a range of cells
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
*/
private void drawLeftBorder(CellRangeAddress range,
BorderStyle borderType) {
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int col = range.getFirstColumn();
for (int i = firstRow; i <= lastRow; i++) {
addProperty(i, col, CellPropertyType.BORDER_LEFT, borderType);
if (borderType == BorderStyle.NONE && col > 0) {
addProperty(i, col - 1, CellPropertyType.BORDER_RIGHT, borderType);
}
}
}
/**
* <p>
* Draws the right border for a range of cells
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
*/
private void drawRightBorder(CellRangeAddress range,
BorderStyle borderType) {
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int col = range.getLastColumn();
for (int i = firstRow; i <= lastRow; i++) {
addProperty(i, col, CellPropertyType.BORDER_RIGHT, borderType);
if (borderType == BorderStyle.NONE
&& col < SpreadsheetVersion.EXCEL2007.getMaxColumns() - 1) {
addProperty(i, col + 1, CellPropertyType.BORDER_LEFT, borderType);
}
}
}
/**
* <p>
* Draws the outside borders for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
* @param extent
* - {@link BorderExtent} of the borders to be
* applied. Valid Values are:
* <ul>
* <li>BorderExtent.ALL</li>
* <li>BorderExtent.HORIZONTAL</li>
* <li>BorderExtent.VERTICAL</li>
* </ul>
*/
private void drawOutsideBorders(CellRangeAddress range,
BorderStyle borderType, BorderExtent extent) {
switch (extent) {
case ALL:
case HORIZONTAL:
case VERTICAL:
if (extent == BorderExtent.ALL || extent == BorderExtent.HORIZONTAL) {
drawTopBorder(range, borderType);
drawBottomBorder(range, borderType);
}
if (extent == BorderExtent.ALL || extent == BorderExtent.VERTICAL) {
drawLeftBorder(range, borderType);
drawRightBorder(range, borderType);
}
break;
default:
throw new IllegalArgumentException(
"Unsupported PropertyTemplate.Extent, valid Extents are ALL, HORIZONTAL, and VERTICAL");
}
}
/**
* <p>
* Draws the horizontal borders for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
* @param extent
* - {@link BorderExtent} of the borders to be
* applied. Valid Values are:
* <ul>
* <li>BorderExtent.ALL</li>
* <li>BorderExtent.INSIDE</li>
* </ul>
*/
private void drawHorizontalBorders(CellRangeAddress range,
BorderStyle borderType, BorderExtent extent) {
switch (extent) {
case ALL:
case INSIDE:
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstRow; i <= lastRow; i++) {
CellRangeAddress row = new CellRangeAddress(i, i, firstCol,
lastCol);
if (extent == BorderExtent.ALL || i > firstRow) {
drawTopBorder(row, borderType);
}
if (extent == BorderExtent.ALL || i < lastRow) {
drawBottomBorder(row, borderType);
}
}
break;
default:
throw new IllegalArgumentException(
"Unsupported PropertyTemplate.Extent, valid Extents are ALL and INSIDE");
}
}
/**
* <p>
* Draws the vertical borders for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which borders are
* drawn.
* @param borderType
* - Type of border to draw. {@link BorderStyle}.
* @param extent
* - {@link BorderExtent} of the borders to be
* applied. Valid Values are:
* <ul>
* <li>BorderExtent.ALL</li>
* <li>BorderExtent.INSIDE</li>
* </ul>
*/
private void drawVerticalBorders(CellRangeAddress range,
BorderStyle borderType, BorderExtent extent) {
switch (extent) {
case ALL:
case INSIDE:
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstCol; i <= lastCol; i++) {
CellRangeAddress row = new CellRangeAddress(firstRow, lastRow,
i, i);
if (extent == BorderExtent.ALL || i > firstCol) {
drawLeftBorder(row, borderType);
}
if (extent == BorderExtent.ALL || i < lastCol) {
drawRightBorder(row, borderType);
}
}
break;
default:
throw new IllegalArgumentException(
"Unsupported PropertyTemplate.Extent, valid Extents are ALL and INSIDE");
}
}
/**
* Removes all border properties from this PropertyTemplate for the
* specified range.
*
* @param range - {@link CellRangeAddress} range of cells to remove borders.
*/
private void removeBorders(CellRangeAddress range) {
EnumSet<CellPropertyType> properties = EnumSet.of(
CellPropertyType.BORDER_TOP,
CellPropertyType.BORDER_BOTTOM,
CellPropertyType.BORDER_LEFT,
CellPropertyType.BORDER_RIGHT);
for (int row = range.getFirstRow(); row <= range.getLastRow(); row++) {
for (int col = range.getFirstColumn(); col <= range
.getLastColumn(); col++) {
removeProperties(row, col, properties);
}
}
removeBorderColors(range);
}
/**
* Applies the drawn borders to a Sheet. The borders that are applied are
* the ones that have been drawn by the {@link #drawBorders} and
* {@link #drawBorderColors} methods.
*
* @param sheet
* - {@link Sheet} on which to apply borders
*/
public void applyBorders(Sheet sheet) {
Workbook wb = sheet.getWorkbook();
for (Map.Entry<CellAddress, EnumMap<CellPropertyType, Object>> entry : _propertyTemplate
.entrySet()) {
CellAddress cellAddress = entry.getKey();
if (cellAddress.getRow() < wb.getSpreadsheetVersion().getMaxRows()
&& cellAddress.getColumn() < wb.getSpreadsheetVersion()
.getMaxColumns()) {
Map<CellPropertyType, Object> properties = entry.getValue();
Row row = CellUtil.getRow(cellAddress.getRow(), sheet);
Cell cell = CellUtil.getCell(row, cellAddress.getColumn());
CellUtil.setCellStylePropertiesEnum(cell, properties);
}
}
}
/**
* Sets the color for a group of cell borders for a cell range. The borders
* are not applied to the cells at this time, just the template is drawn. If
* the borders do not exist, a BORDER_THIN border is used. To apply the
* drawn borders to a sheet, use {@link #applyBorders}.
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
* @param extent
* - {@link BorderExtent} of the borders for which
* colors are set.
*/
public void drawBorderColors(CellRangeAddress range, short color,
BorderExtent extent) {
switch (extent) {
case NONE:
removeBorderColors(range);
break;
case ALL:
drawHorizontalBorderColors(range, color, BorderExtent.ALL);
drawVerticalBorderColors(range, color, BorderExtent.ALL);
break;
case INSIDE:
drawHorizontalBorderColors(range, color, BorderExtent.INSIDE);
drawVerticalBorderColors(range, color, BorderExtent.INSIDE);
break;
case OUTSIDE:
drawOutsideBorderColors(range, color, BorderExtent.ALL);
break;
case TOP:
drawTopBorderColor(range, color);
break;
case BOTTOM:
drawBottomBorderColor(range, color);
break;
case LEFT:
drawLeftBorderColor(range, color);
break;
case RIGHT:
drawRightBorderColor(range, color);
break;
case HORIZONTAL:
drawHorizontalBorderColors(range, color, BorderExtent.ALL);
break;
case INSIDE_HORIZONTAL:
drawHorizontalBorderColors(range, color, BorderExtent.INSIDE);
break;
case OUTSIDE_HORIZONTAL:
drawOutsideBorderColors(range, color, BorderExtent.HORIZONTAL);
break;
case VERTICAL:
drawVerticalBorderColors(range, color, BorderExtent.ALL);
break;
case INSIDE_VERTICAL:
drawVerticalBorderColors(range, color, BorderExtent.INSIDE);
break;
case OUTSIDE_VERTICAL:
drawOutsideBorderColors(range, color, BorderExtent.VERTICAL);
break;
}
}
/**
* <p>
* Sets the color of the top border for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
*/
private void drawTopBorderColor(CellRangeAddress range, short color) {
int row = range.getFirstRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstCol; i <= lastCol; i++) {
if (getBorderStyle(row, i,
CellPropertyType.BORDER_TOP) == BorderStyle.NONE) {
drawTopBorder(new CellRangeAddress(row, row, i, i),
BorderStyle.THIN);
}
addProperty(row, i, CellPropertyType.TOP_BORDER_COLOR, color);
}
}
/**
* <p>
* Sets the color of the bottom border for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
*/
private void drawBottomBorderColor(CellRangeAddress range, short color) {
int row = range.getLastRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstCol; i <= lastCol; i++) {
if (getBorderStyle(row, i,
CellPropertyType.BORDER_BOTTOM) == BorderStyle.NONE) {
drawBottomBorder(new CellRangeAddress(row, row, i, i),
BorderStyle.THIN);
}
addProperty(row, i, CellPropertyType.BOTTOM_BORDER_COLOR, color);
}
}
/**
* <p>
* Sets the color of the left border for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
*/
private void drawLeftBorderColor(CellRangeAddress range, short color) {
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int col = range.getFirstColumn();
for (int i = firstRow; i <= lastRow; i++) {
if (getBorderStyle(i, col,
CellPropertyType.BORDER_LEFT) == BorderStyle.NONE) {
drawLeftBorder(new CellRangeAddress(i, i, col, col),
BorderStyle.THIN);
}
addProperty(i, col, CellPropertyType.LEFT_BORDER_COLOR, color);
}
}
/**
* <p>
* Sets the color of the right border for a range of cells. If the border is
* not drawn, it defaults to BORDER_THIN
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
*/
private void drawRightBorderColor(CellRangeAddress range, short color) {
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int col = range.getLastColumn();
for (int i = firstRow; i <= lastRow; i++) {
if (getBorderStyle(i, col,
CellPropertyType.BORDER_RIGHT) == BorderStyle.NONE) {
drawRightBorder(new CellRangeAddress(i, i, col, col),
BorderStyle.THIN);
}
addProperty(i, col, CellPropertyType.RIGHT_BORDER_COLOR, color);
}
}
/**
* <p>
* Sets the color of the outside borders for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
* @param extent
* - {@link BorderExtent} of the borders for which
* colors are set. Valid Values are:
* <ul>
* <li>BorderExtent.ALL</li>
* <li>BorderExtent.HORIZONTAL</li>
* <li>BorderExtent.VERTICAL</li>
* </ul>
*/
private void drawOutsideBorderColors(CellRangeAddress range, short color,
BorderExtent extent) {
switch (extent) {
case ALL:
case HORIZONTAL:
case VERTICAL:
if (extent == BorderExtent.ALL || extent == BorderExtent.HORIZONTAL) {
drawTopBorderColor(range, color);
drawBottomBorderColor(range, color);
}
if (extent == BorderExtent.ALL || extent == BorderExtent.VERTICAL) {
drawLeftBorderColor(range, color);
drawRightBorderColor(range, color);
}
break;
default:
throw new IllegalArgumentException(
"Unsupported PropertyTemplate.Extent, valid Extents are ALL, HORIZONTAL, and VERTICAL");
}
}
/**
* <p>
* Sets the color of the horizontal borders for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
* @param extent
* - {@link BorderExtent} of the borders for which
* colors are set. Valid Values are:
* <ul>
* <li>BorderExtent.ALL</li>
* <li>BorderExtent.INSIDE</li>
* </ul>
*/
private void drawHorizontalBorderColors(CellRangeAddress range, short color,
BorderExtent extent) {
switch (extent) {
case ALL:
case INSIDE:
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstRow; i <= lastRow; i++) {
CellRangeAddress row = new CellRangeAddress(i, i, firstCol,
lastCol);
if (extent == BorderExtent.ALL || i > firstRow) {
drawTopBorderColor(row, color);
}
if (extent == BorderExtent.ALL || i < lastRow) {
drawBottomBorderColor(row, color);
}
}
break;
default:
throw new IllegalArgumentException(
"Unsupported PropertyTemplate.Extent, valid Extents are ALL and INSIDE");
}
}
/**
* <p>
* Sets the color of the vertical borders for a range of cells.
* </p>
*
* @param range
* - {@link CellRangeAddress} range of cells on which colors are
* set.
* @param color
* - Color index from {@link IndexedColors} used to draw the
* borders.
* @param extent
* - {@link BorderExtent} of the borders for which
* colors are set. Valid Values are:
* <ul>
* <li>BorderExtent.ALL</li>
* <li>BorderExtent.INSIDE</li>
* </ul>
*/
private void drawVerticalBorderColors(CellRangeAddress range, short color,
BorderExtent extent) {
switch (extent) {
case ALL:
case INSIDE:
int firstRow = range.getFirstRow();
int lastRow = range.getLastRow();
int firstCol = range.getFirstColumn();
int lastCol = range.getLastColumn();
for (int i = firstCol; i <= lastCol; i++) {
CellRangeAddress row = new CellRangeAddress(firstRow, lastRow,
i, i);
if (extent == BorderExtent.ALL || i > firstCol) {
drawLeftBorderColor(row, color);
}
if (extent == BorderExtent.ALL || i < lastCol) {
drawRightBorderColor(row, color);
}
}
break;
default:
throw new IllegalArgumentException(
"Unsupported PropertyTemplate.Extent, valid Extents are ALL and INSIDE");
}
}
/**
* Removes all border properties from this PropertyTemplate for the
* specified range.
*
* @param range - {@link CellRangeAddress} range of cells to remove borders.
*/
private void removeBorderColors(CellRangeAddress range) {
Set<CellPropertyType> properties = EnumSet.of(
CellPropertyType.TOP_BORDER_COLOR,
CellPropertyType.BOTTOM_BORDER_COLOR,
CellPropertyType.LEFT_BORDER_COLOR,
CellPropertyType.RIGHT_BORDER_COLOR);
for (int row = range.getFirstRow(); row <= range.getLastRow(); row++) {
for (int col = range.getFirstColumn(); col <= range
.getLastColumn(); col++) {
removeProperties(row, col, properties);
}
}
}
/**
* Adds a property to this PropertyTemplate for a given cell
*/
private void addProperty(int row, int col, CellPropertyType property, short value) {
addProperty(row, col, property, Short.valueOf(value));
}
/**
* Adds a property to this PropertyTemplate for a given cell
*/
private void addProperty(int row, int col, CellPropertyType property, Object value) {
CellAddress cell = new CellAddress(row, col);
EnumMap<CellPropertyType, Object> cellProperties = _propertyTemplate.get(cell);
if (cellProperties == null) {
cellProperties = new EnumMap<>(CellPropertyType.class);
}
cellProperties.put(property, value);
_propertyTemplate.put(cell, cellProperties);
}
/**
* Removes a set of properties from this PropertyTemplate for a
* given cell
*/
private void removeProperties(int row, int col, Set<CellPropertyType> properties) {
CellAddress cell = new CellAddress(row, col);
EnumMap<CellPropertyType, Object> cellProperties = _propertyTemplate.get(cell);
if (cellProperties != null) {
cellProperties.keySet().removeAll(properties);
if (cellProperties.isEmpty()) {
_propertyTemplate.remove(cell);
} else {
_propertyTemplate.put(cell, cellProperties);
}
}
}
/**
* Retrieves the number of borders assigned to a cell
*/
public int getNumBorders(CellAddress cell) {
Map<CellPropertyType, Object> cellProperties = _propertyTemplate.get(cell);
if (cellProperties == null) {
return 0;
}
int count = 0;
for (CellPropertyType property : cellProperties.keySet()) {
if (property.equals(CellPropertyType.BORDER_TOP))
count += 1;
if (property.equals(CellPropertyType.BORDER_BOTTOM))
count += 1;
if (property.equals(CellPropertyType.BORDER_LEFT))
count += 1;
if (property.equals(CellPropertyType.BORDER_RIGHT))
count += 1;
}
return count;
}
/**
* Retrieves the number of borders assigned to a cell
*/
public int getNumBorders(int row, int col) {
return getNumBorders(new CellAddress(row, col));
}
/**
* Retrieves the number of border colors assigned to a cell
*/
public int getNumBorderColors(CellAddress cell) {
Map<CellPropertyType, Object> cellProperties = _propertyTemplate.get(cell);
if (cellProperties == null) {
return 0;
}
int count = 0;
for (CellPropertyType property : cellProperties.keySet()) {
if (property.equals(CellPropertyType.TOP_BORDER_COLOR))
count += 1;
if (property.equals(CellPropertyType.BOTTOM_BORDER_COLOR))
count += 1;
if (property.equals(CellPropertyType.LEFT_BORDER_COLOR))
count += 1;
if (property.equals(CellPropertyType.RIGHT_BORDER_COLOR))
count += 1;
}
return count;
}
/**
* Retrieves the number of border colors assigned to a cell
*/
public int getNumBorderColors(int row, int col) {
return getNumBorderColors(new CellAddress(row, col));
}
/**
* Retrieves the border style for a given cell
*/
public BorderStyle getBorderStyle(CellAddress cell, CellPropertyType property) {
BorderStyle value = BorderStyle.NONE;
Map<CellPropertyType, Object> cellProperties = _propertyTemplate.get(cell);
if (cellProperties != null) {
Object obj = cellProperties.get(property);
if (obj instanceof BorderStyle) {
value = (BorderStyle) obj;
}
}
return value;
}
/**
* Retrieves the border style for a given cell
*
* @deprecated See {@link #getBorderStyle(CellAddress, CellPropertyType)}
*/
@Deprecated
public BorderStyle getBorderStyle(CellAddress cell, String propertyName) {
return getBorderStyle(cell, CellUtil.namePropertyMap.get(propertyName));
}
/**
* Retrieves the border style for a given cell
*/
public BorderStyle getBorderStyle(int row, int col, CellPropertyType property) {
return getBorderStyle(new CellAddress(row, col), property);
}
/**
* Retrieves the border style for a given cell
*
* @deprecated See {@link #getBorderStyle(int, int, CellPropertyType)}
*/
@Deprecated
public BorderStyle getBorderStyle(int row, int col, String propertyName) {
return getBorderStyle(new CellAddress(row, col), CellUtil.namePropertyMap.get(propertyName));
}
/**
* Retrieves the border style for a given cell
*/
public short getTemplateProperty(CellAddress cell, CellPropertyType property) {
short value = 0;
Map<CellPropertyType, Object> cellProperties = _propertyTemplate.get(cell);
if (cellProperties != null) {
Object obj = cellProperties.get(property);
if (obj != null) {
value = getShort(obj);
}
}
return value;
}
/**
* Retrieves the border style for a given cell
*
* @deprecated See {@link #getTemplateProperty(CellAddress, CellPropertyType)}
*/
@Deprecated
public short getTemplateProperty(CellAddress cell, String propertyName) {
return getTemplateProperty(cell, CellUtil.namePropertyMap.get(propertyName));
}
/**
* Retrieves the border style for a given cell
*/
public short getTemplateProperty(int row, int col, CellPropertyType property) {
return getTemplateProperty(new CellAddress(row, col), property);
}
/**
* Retrieves the border style for a given cell
*
* @deprecated See {@link #getTemplateProperty(int, int, CellPropertyType)}
*/
@Deprecated
@Removal(version = "7.0.0")
public short getTemplateProperty(int row, int col, String propertyName) {
return getTemplateProperty(new CellAddress(row, col), CellUtil.namePropertyMap.get(propertyName));
}
/**
* Converts a Short object to a short value or 0 if the object is not a
* Short
*
* @param value Potentially short value to convert
* @return short value, or 0 if not a short
*/
private static short getShort(Object value) {
if (value instanceof Number) {
return ((Number) value).shortValue();
}
return 0;
}
}
/*
 * java-analytics-admin/proto-google-analytics-admin-v1alpha/src/main/java/com/google/analytics/admin/v1alpha/ListAudiencesResponse.java
 *
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1alpha/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1alpha;
/**
*
*
* <pre>
* Response message for ListAudiences RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1alpha.ListAudiencesResponse}
*/
public final class ListAudiencesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1alpha.ListAudiencesResponse)
ListAudiencesResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListAudiencesResponse.newBuilder() to construct.
  private ListAudiencesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor: initializes fields to their proto3 defaults
  // (empty repeated list, empty string token).
  private ListAudiencesResponse() {
    audiences_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Overrides GeneratedMessageV3.newInstance; returns a fresh default message.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListAudiencesResponse();
  }
  // Returns the message descriptor generated from analytics_admin.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_ListAudiencesResponse_descriptor;
  }

  // Wires the descriptor's fields to this class and its Builder for reflection.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1alpha_ListAudiencesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1alpha.ListAudiencesResponse.class,
            com.google.analytics.admin.v1alpha.ListAudiencesResponse.Builder.class);
  }
  public static final int AUDIENCES_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  // Backing storage for the repeated `audiences` field; defaults to an empty
  // list (set in the no-arg constructor).
  private java.util.List<com.google.analytics.admin.v1alpha.Audience> audiences_;

  /**
   *
   *
   * <pre>
   * List of Audiences.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.analytics.admin.v1alpha.Audience> getAudiencesList() {
    return audiences_;
  }

  /**
   *
   *
   * <pre>
   * List of Audiences.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.analytics.admin.v1alpha.AudienceOrBuilder>
      getAudiencesOrBuilderList() {
    return audiences_;
  }

  /**
   *
   *
   * <pre>
   * List of Audiences.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
   */
  @java.lang.Override
  public int getAudiencesCount() {
    return audiences_.size();
  }

  /**
   *
   *
   * <pre>
   * List of Audiences.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.Audience getAudiences(int index) {
    return audiences_.get(index);
  }

  /**
   *
   *
   * <pre>
   * List of Audiences.
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1alpha.AudienceOrBuilder getAudiencesOrBuilder(int index) {
    return audiences_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; decoded lazily and cached below.
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the UTF-8 bytes once and cache
      // the resulting String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Mirror of getNextPageToken(): encode once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated audiences.
    for (int i = 0; i < audiences_.size(); i++) {
      output.writeMessage(1, audiences_.get(i));
    }
    // Field 2: next_page_token, skipped when empty (proto3 default).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is cached in memoizedSize; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < audiences_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, audiences_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1alpha.ListAudiencesResponse)) {
      return super.equals(obj);
    }
    com.google.analytics.admin.v1alpha.ListAudiencesResponse other =
        (com.google.analytics.admin.v1alpha.ListAudiencesResponse) obj;

    // Field-by-field comparison, including unknown fields.
    if (!getAudiencesList().equals(other.getAudiencesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 means not yet computed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Each set field is mixed in tagged by its field number (protoc's scheme).
    if (getAudiencesCount() > 0) {
      hash = (37 * hash) + AUDIENCES_FIELD_NUMBER;
      hash = (53 * hash) + getAudiencesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads; all delegate to
  // the message's PARSER (optionally via GeneratedMessageV3 stream helpers).
  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1alpha.ListAudiencesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder seeded from the (empty) default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the given prototype's field values.
public static Builder newBuilder(
com.google.analytics.admin.v1alpha.ListAudiencesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance maps to an empty builder; otherwise copy this message's state.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Response message for ListAudiences RPC.
 * </pre>
 *
 * Protobuf type {@code google.analytics.admin.v1alpha.ListAudiencesResponse}
 */
// NOTE: machine-generated by protoc — do not hand-edit; regenerate from the .proto instead.
// Bit layout of bitField0_: 0x1 = audiences list is mutable, 0x2 = nextPageToken has been set.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1alpha.ListAudiencesResponse)
com.google.analytics.admin.v1alpha.ListAudiencesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_ListAudiencesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_ListAudiencesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1alpha.ListAudiencesResponse.class,
com.google.analytics.admin.v1alpha.ListAudiencesResponse.Builder.class);
}
// Construct using com.google.analytics.admin.v1alpha.ListAudiencesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (audiencesBuilder_ == null) {
audiences_ = java.util.Collections.emptyList();
} else {
audiences_ = null;
audiencesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1alpha.AnalyticsAdminProto
.internal_static_google_analytics_admin_v1alpha_ListAudiencesResponse_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.ListAudiencesResponse getDefaultInstanceForType() {
return com.google.analytics.admin.v1alpha.ListAudiencesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.ListAudiencesResponse build() {
com.google.analytics.admin.v1alpha.ListAudiencesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.ListAudiencesResponse buildPartial() {
com.google.analytics.admin.v1alpha.ListAudiencesResponse result =
new com.google.analytics.admin.v1alpha.ListAudiencesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Moves the repeated audiences field into the result, freezing the list when the
// builder owns it directly (no nested field builder in use).
private void buildPartialRepeatedFields(
com.google.analytics.admin.v1alpha.ListAudiencesResponse result) {
if (audiencesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
audiences_ = java.util.Collections.unmodifiableList(audiences_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.audiences_ = audiences_;
} else {
result.audiences_ = audiencesBuilder_.build();
}
}
// Copies scalar fields whose "has been set" bit is on.
private void buildPartial0(com.google.analytics.admin.v1alpha.ListAudiencesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1alpha.ListAudiencesResponse) {
return mergeFrom((com.google.analytics.admin.v1alpha.ListAudiencesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.analytics.admin.v1alpha.ListAudiencesResponse other) {
if (other == com.google.analytics.admin.v1alpha.ListAudiencesResponse.getDefaultInstance())
return this;
// Repeated field merge: when this builder's list is empty, share the other's
// (immutable) list instead of copying; otherwise append element-wise.
if (audiencesBuilder_ == null) {
if (!other.audiences_.isEmpty()) {
if (audiences_.isEmpty()) {
audiences_ = other.audiences_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAudiencesIsMutable();
audiences_.addAll(other.audiences_);
}
onChanged();
}
} else {
if (!other.audiences_.isEmpty()) {
if (audiencesBuilder_.isEmpty()) {
audiencesBuilder_.dispose();
audiencesBuilder_ = null;
audiences_ = other.audiences_;
bitField0_ = (bitField0_ & ~0x00000001);
audiencesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAudiencesFieldBuilder()
: null;
} else {
audiencesBuilder_.addAllMessages(other.audiences_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
// Wire-format parse loop: tag 10 = audiences (field 1, length-delimited),
// tag 18 = next_page_token (field 2, length-delimited), 0 = end of stream.
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.analytics.admin.v1alpha.Audience m =
input.readMessage(
com.google.analytics.admin.v1alpha.Audience.parser(), extensionRegistry);
if (audiencesBuilder_ == null) {
ensureAudiencesIsMutable();
audiences_.add(m);
} else {
audiencesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.analytics.admin.v1alpha.Audience> audiences_ =
java.util.Collections.emptyList();
// Lazily copy-on-write: switch to a private ArrayList before the first mutation.
private void ensureAudiencesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
audiences_ =
new java.util.ArrayList<com.google.analytics.admin.v1alpha.Audience>(audiences_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1alpha.Audience,
com.google.analytics.admin.v1alpha.Audience.Builder,
com.google.analytics.admin.v1alpha.AudienceOrBuilder>
audiencesBuilder_;
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public java.util.List<com.google.analytics.admin.v1alpha.Audience> getAudiencesList() {
if (audiencesBuilder_ == null) {
return java.util.Collections.unmodifiableList(audiences_);
} else {
return audiencesBuilder_.getMessageList();
}
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public int getAudiencesCount() {
if (audiencesBuilder_ == null) {
return audiences_.size();
} else {
return audiencesBuilder_.getCount();
}
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public com.google.analytics.admin.v1alpha.Audience getAudiences(int index) {
if (audiencesBuilder_ == null) {
return audiences_.get(index);
} else {
return audiencesBuilder_.getMessage(index);
}
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder setAudiences(int index, com.google.analytics.admin.v1alpha.Audience value) {
if (audiencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAudiencesIsMutable();
audiences_.set(index, value);
onChanged();
} else {
audiencesBuilder_.setMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder setAudiences(
int index, com.google.analytics.admin.v1alpha.Audience.Builder builderForValue) {
if (audiencesBuilder_ == null) {
ensureAudiencesIsMutable();
audiences_.set(index, builderForValue.build());
onChanged();
} else {
audiencesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder addAudiences(com.google.analytics.admin.v1alpha.Audience value) {
if (audiencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAudiencesIsMutable();
audiences_.add(value);
onChanged();
} else {
audiencesBuilder_.addMessage(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder addAudiences(int index, com.google.analytics.admin.v1alpha.Audience value) {
if (audiencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAudiencesIsMutable();
audiences_.add(index, value);
onChanged();
} else {
audiencesBuilder_.addMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder addAudiences(
com.google.analytics.admin.v1alpha.Audience.Builder builderForValue) {
if (audiencesBuilder_ == null) {
ensureAudiencesIsMutable();
audiences_.add(builderForValue.build());
onChanged();
} else {
audiencesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder addAudiences(
int index, com.google.analytics.admin.v1alpha.Audience.Builder builderForValue) {
if (audiencesBuilder_ == null) {
ensureAudiencesIsMutable();
audiences_.add(index, builderForValue.build());
onChanged();
} else {
audiencesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder addAllAudiences(
java.lang.Iterable<? extends com.google.analytics.admin.v1alpha.Audience> values) {
if (audiencesBuilder_ == null) {
ensureAudiencesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, audiences_);
onChanged();
} else {
audiencesBuilder_.addAllMessages(values);
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder clearAudiences() {
if (audiencesBuilder_ == null) {
audiences_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
audiencesBuilder_.clear();
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public Builder removeAudiences(int index) {
if (audiencesBuilder_ == null) {
ensureAudiencesIsMutable();
audiences_.remove(index);
onChanged();
} else {
audiencesBuilder_.remove(index);
}
return this;
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public com.google.analytics.admin.v1alpha.Audience.Builder getAudiencesBuilder(int index) {
return getAudiencesFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public com.google.analytics.admin.v1alpha.AudienceOrBuilder getAudiencesOrBuilder(int index) {
if (audiencesBuilder_ == null) {
return audiences_.get(index);
} else {
return audiencesBuilder_.getMessageOrBuilder(index);
}
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public java.util.List<? extends com.google.analytics.admin.v1alpha.AudienceOrBuilder>
getAudiencesOrBuilderList() {
if (audiencesBuilder_ != null) {
return audiencesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(audiences_);
}
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public com.google.analytics.admin.v1alpha.Audience.Builder addAudiencesBuilder() {
return getAudiencesFieldBuilder()
.addBuilder(com.google.analytics.admin.v1alpha.Audience.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public com.google.analytics.admin.v1alpha.Audience.Builder addAudiencesBuilder(int index) {
return getAudiencesFieldBuilder()
.addBuilder(index, com.google.analytics.admin.v1alpha.Audience.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * List of Audiences.
 * </pre>
 *
 * <code>repeated .google.analytics.admin.v1alpha.Audience audiences = 1;</code>
 */
public java.util.List<com.google.analytics.admin.v1alpha.Audience.Builder>
getAudiencesBuilderList() {
return getAudiencesFieldBuilder().getBuilderList();
}
// Lazily creates the nested field builder; once created, audiences_ is handed off
// to it and nulled out here (the builder becomes the single source of truth).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1alpha.Audience,
com.google.analytics.admin.v1alpha.Audience.Builder,
com.google.analytics.admin.v1alpha.AudienceOrBuilder>
getAudiencesFieldBuilder() {
if (audiencesBuilder_ == null) {
audiencesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.analytics.admin.v1alpha.Audience,
com.google.analytics.admin.v1alpha.Audience.Builder,
com.google.analytics.admin.v1alpha.AudienceOrBuilder>(
audiences_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
audiences_ = null;
}
return audiencesBuilder_;
}
// Stored as Object: either a String or a ByteString, converted lazily in each direction.
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1alpha.ListAudiencesResponse)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1alpha.ListAudiencesResponse)
// Shared empty singleton; all unset message references resolve to this instance.
private static final com.google.analytics.admin.v1alpha.ListAudiencesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1alpha.ListAudiencesResponse();
}
public static com.google.analytics.admin.v1alpha.ListAudiencesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser: delegates to Builder.mergeFrom and always returns a partial
// message on failure so callers can inspect what was parsed before the error.
private static final com.google.protobuf.Parser<ListAudiencesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAudiencesResponse>() {
@java.lang.Override
public ListAudiencesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static and instance accessors for the shared PARSER singleton.
public static com.google.protobuf.Parser<ListAudiencesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAudiencesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1alpha.ListAudiencesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/pinot | 36,483 | pinot-core/src/test/java/org/apache/pinot/queries/ForwardIndexHandlerReloadQueriesTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.queries;
import java.io.File;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.io.FileUtils;
import org.apache.pinot.common.response.broker.BrokerResponseNative;
import org.apache.pinot.common.response.broker.ResultTable;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.segment.local.indexsegment.immutable.ImmutableSegmentLoader;
import org.apache.pinot.segment.local.segment.creator.impl.SegmentIndexCreationDriverImpl;
import org.apache.pinot.segment.local.segment.index.loader.IndexLoadingConfig;
import org.apache.pinot.segment.spi.ColumnMetadata;
import org.apache.pinot.segment.spi.ImmutableSegment;
import org.apache.pinot.segment.spi.IndexSegment;
import org.apache.pinot.segment.spi.creator.SegmentGeneratorConfig;
import org.apache.pinot.segment.spi.creator.SegmentIndexCreationDriver;
import org.apache.pinot.spi.config.table.FieldConfig;
import org.apache.pinot.spi.config.table.TableConfig;
import org.apache.pinot.spi.config.table.TableType;
import org.apache.pinot.spi.data.FieldSpec.DataType;
import org.apache.pinot.spi.data.Schema;
import org.apache.pinot.spi.utils.builder.TableConfigBuilder;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
/**
* The <code>ForwardIndexHandlerReloadQueriesTest</code> class sets up the index segment for the
* no forward index multi-value queries test with reload.
* <p>There are 14 columns in total and 100000 records inside the original Avro file, of which 10 columns are selected to build
* the index segment. Selected columns information are as following:
* <ul>
* ColumnName, FieldType, DataType, Cardinality, IsSorted, HasInvertedIndex, IsMultiValue, HasDictionary, RangeIndex
* <li>column1, METRIC, INT, 51594, F, F, F, F, F</li>
* <li>column2, METRIC, INT, 42242, F, F, F, F, F</li>
* <li>column3, DIMENSION, STRING, 5, F, F, F, F, F</li>
* <li>column5, DIMENSION, STRING, 9, F, F, F, F, F</li>
* <li>column6, DIMENSION, INT, 18499, F, F, T, T, F</li>
* <li>column7, DIMENSION, INT, 359, F, F, T, F, F</li>
* <li>column8, DIMENSION, INT, 850, F, T, F, T, F</li>
* <li>column9, METRIC, INT, 146, F, T, F, T, T</li>
* <li>column10, METRIC, INT, 3960, F, F, F, F, T</li>
* <li>daysSinceEpoch, TIME, INT, 1, T, F, F, T, F</li>
* </ul>
*/
public class ForwardIndexHandlerReloadQueriesTest extends BaseQueriesTest {
// Scratch directory for the generated segment; created in setUp, wiped in tearDown.
private static final File INDEX_DIR =
new File(FileUtils.getTempDirectory(), ForwardIndexHandlerReloadQueriesTest.class.getSimpleName());
// Bundled multi-value test data on the classpath.
private static final String AVRO_DATA = "data" + File.separator + "test_data-mv.avro";
private static final String RAW_TABLE_NAME = "testTable";
private static final String SEGMENT_NAME = "testSegment";
//@formatter:off
private static final Schema SCHEMA = new Schema.SchemaBuilder().setSchemaName(RAW_TABLE_NAME)
.addMetric("column1", DataType.INT)
.addMetric("column2", DataType.INT)
.addSingleValueDimension("column3", DataType.STRING)
.addSingleValueDimension("column5", DataType.STRING)
.addMultiValueDimension("column6", DataType.INT)
.addMultiValueDimension("column7", DataType.INT)
.addSingleValueDimension("column8", DataType.INT)
.addMetric("column9", DataType.INT)
.addMetric("column10", DataType.INT)
.addDateTime("daysSinceEpoch", DataType.INT, "EPOCH|DAYS", "1:DAYS")
.build();
// Hard-coded query filter
protected static final String FILTER = " WHERE column1 > 100000000"
+ " AND column2 BETWEEN 20000000 AND 1000000000"
+ " AND column3 <> 'w'"
+ " AND (column6 < 500000 OR column7 NOT IN (225, 407))"
+ " AND daysSinceEpoch = 1756015683";
//@formatter:on
// Segment under test; rebuilt before every test method (see setUp).
private IndexSegment _indexSegment;
// Contains 2 identical index segments.
private List<IndexSegment> _indexSegments;
@BeforeMethod
public void setUp()
throws Exception {
// Builds a fresh immutable segment from the bundled Avro file before EACH test:
// raw (no-dictionary, SNAPPY) forward indexes on six columns, inverted indexes on two,
// then verifies the loaded segment's per-column metadata matches that configuration.
FileUtils.deleteQuietly(INDEX_DIR);
List<String> noDictionaryColumns = List.of("column1", "column2", "column3", "column5", "column7", "column10");
List<String> invertedIndexColumns = List.of("column8", "column9");
List<FieldConfig> fieldConfigs = new ArrayList<>(noDictionaryColumns.size());
for (String column : noDictionaryColumns) {
fieldConfigs.add(
new FieldConfig(column, FieldConfig.EncodingType.RAW, List.of(), FieldConfig.CompressionCodec.SNAPPY, null));
}
TableConfig tableConfig = createTableConfig(noDictionaryColumns, invertedIndexColumns, List.of(), fieldConfigs);
URL resource = getClass().getClassLoader().getResource(AVRO_DATA);
assertNotNull(resource);
String avroFile = resource.getFile();
SegmentGeneratorConfig generatorConfig = new SegmentGeneratorConfig(tableConfig, SCHEMA);
generatorConfig.setInputFilePath(avroFile);
generatorConfig.setOutDir(INDEX_DIR.getAbsolutePath());
generatorConfig.setSegmentName(SEGMENT_NAME);
// The test data's time values are synthetic, so skip time-column validation.
generatorConfig.setSkipTimeValueCheck(true);
SegmentIndexCreationDriver driver = new SegmentIndexCreationDriverImpl();
driver.init(generatorConfig);
driver.build();
ImmutableSegment segment =
ImmutableSegmentLoader.load(new File(INDEX_DIR, SEGMENT_NAME), new IndexLoadingConfig(tableConfig, SCHEMA));
// Sanity-check every column: forward index always present; dictionary presence must
// agree with the no-dictionary configuration; inverted index present where configured.
Map<String, ColumnMetadata> columnMetadataMap = segment.getSegmentMetadata().getColumnMetadataMap();
for (Map.Entry<String, ColumnMetadata> entry : columnMetadataMap.entrySet()) {
String column = entry.getKey();
ColumnMetadata metadata = entry.getValue();
assertNotNull(segment.getForwardIndex(column));
if (noDictionaryColumns.contains(column)) {
assertFalse(metadata.hasDictionary());
assertNull(segment.getDictionary(column));
} else {
assertTrue(metadata.hasDictionary());
assertNotNull(segment.getDictionary(column));
}
if (invertedIndexColumns.contains(column)) {
assertNotNull(segment.getInvertedIndex(column));
}
}
_indexSegment = segment;
// BaseQueriesTest runs server-side queries against two copies of the same segment.
_indexSegments = List.of(segment, segment);
}
/**
 * Builds an OFFLINE table config for the test table with the given index configuration.
 *
 * @param noDictionaryColumns columns stored raw (no dictionary)
 * @param invertedIndexColumns columns with an inverted index
 * @param rangeIndexColumns columns with a range index
 * @param fieldConfigs per-field encoding/compression settings
 * @return the assembled table config
 */
private TableConfig createTableConfig(List<String> noDictionaryColumns, List<String> invertedIndexColumns,
    List<String> rangeIndexColumns, List<FieldConfig> fieldConfigs) {
  TableConfigBuilder builder = new TableConfigBuilder(TableType.OFFLINE);
  builder.setTableName(RAW_TABLE_NAME);
  builder.setTimeColumnName("daysSinceEpoch");
  builder.setNoDictionaryColumns(noDictionaryColumns);
  builder.setInvertedIndexColumns(invertedIndexColumns);
  builder.setRangeIndexColumns(rangeIndexColumns);
  builder.setFieldConfigList(fieldConfigs);
  return builder.build();
}
@AfterMethod
public void tearDown() {
// Release the segment's resources first, then remove its on-disk directory.
_indexSegment.destroy();
FileUtils.deleteQuietly(INDEX_DIR);
}
// BaseQueriesTest hooks: the shared WHERE clause and the segment(s) queries run against.
@Override
protected String getFilter() {
return FILTER;
}
@Override
protected IndexSegment getIndexSegment() {
return _indexSegment;
}
@Override
protected List<IndexSegment> getIndexSegments() {
return _indexSegments;
}
@Test
public void testSelectQueries()
    throws Exception {
  // Runs a selection query against raw (no-dictionary) and multi-value columns, reloads the
  // segment with changed index properties, re-runs the same query, and verifies the rows match.
  // Note: the pre/post-reload execution stats differ only in entries scanned in filter
  // (913464 before vs 250896 after), reflecting the index changes applied by the reload.
  String query =
      "SELECT column1, column2, column3, column6, column7, column10 FROM testTable WHERE column10 > 674022574 AND "
          + "column1 > 100000000 AND column2 BETWEEN 20000000 AND 1000000000 AND column3 <> 'w' AND (column6 < "
          + "500000 OR column7 NOT IN (225, 407)) AND daysSinceEpoch = 1756015683 ORDER BY column1";
  // The expected schema is identical before and after the reload, so build it once.
  DataSchema expectedDataSchema = new DataSchema(new String[]{
      "column1", "column2", "column3", "column6", "column7", "column10"
  }, new DataSchema.ColumnDataType[]{
      DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.STRING,
      DataSchema.ColumnDataType.INT_ARRAY, DataSchema.ColumnDataType.INT_ARRAY, DataSchema.ColumnDataType.INT
  });

  BrokerResponseNative brokerResponseNative = getBrokerResponse(query);
  // The original combined a null-or-empty check with a later assertNotNull; the net
  // contract is "exceptions present and empty", stated directly here.
  assertNotNull(brokerResponseNative.getExceptions());
  assertTrue(brokerResponseNative.getExceptions().isEmpty());
  ResultTable resultTable = brokerResponseNative.getResultTable();
  assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
  assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
  assertEquals(brokerResponseNative.getNumDocsScanned(), 1184L);
  assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
  assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
  assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 1384L);
  assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 913464L);
  assertEquals(resultTable.getDataSchema(), expectedDataSchema);
  List<Object[]> resultRows1 = resultTable.getRows();

  changePropertiesAndReloadSegment();

  // Run the same query again and verify identical results with the new index layout.
  brokerResponseNative = getBrokerResponse(query);
  assertNotNull(brokerResponseNative.getExceptions());
  assertTrue(brokerResponseNative.getExceptions().isEmpty());
  resultTable = brokerResponseNative.getResultTable();
  assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
  assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
  assertEquals(brokerResponseNative.getNumDocsScanned(), 1184L);
  assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
  assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
  assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 1384L);
  assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 250896L);
  assertEquals(resultTable.getDataSchema(), expectedDataSchema);
  List<Object[]> resultRows2 = resultTable.getRows();

  validateBeforeAfterQueryResults(resultRows1, resultRows2);
}
@Test
public void testSelectWithDistinctQueries()
throws Exception {
String query =
"SELECT DISTINCT column1, column2, column3, column6, column7, column9, column10 FROM testTable ORDER BY "
+ "column1 LIMIT 10";
BrokerResponseNative brokerResponseNative = getBrokerResponse(query);
assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
ResultTable resultTable = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 2800000L);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
assertNotNull(brokerResponseNative.getExceptions());
assertEquals(brokerResponseNative.getExceptions().size(), 0);
DataSchema dataSchema = new DataSchema(new String[]{
"column1", "column2", "column3", "column6", "column7", "column9", "column10"
}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.STRING,
DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT,
DataSchema.ColumnDataType.INT
});
assertEquals(resultTable.getDataSchema(), dataSchema);
List<Object[]> resultRows1 = resultTable.getRows();
changePropertiesAndReloadSegment();
brokerResponseNative = getBrokerResponse(query);
assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
resultTable = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 2800000L);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
assertNotNull(brokerResponseNative.getExceptions());
assertEquals(brokerResponseNative.getExceptions().size(), 0);
dataSchema = new DataSchema(new String[]{
"column1", "column2", "column3", "column6", "column7", "column9", "column10"
}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.STRING,
DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT,
DataSchema.ColumnDataType.INT
});
assertEquals(resultTable.getDataSchema(), dataSchema);
List<Object[]> resultRows2 = resultTable.getRows();
validateBeforeAfterQueryResults(resultRows1, resultRows2);
}
@Test
public void testSelectWithGroupByOrderByQueries()
throws Exception {
String query =
"SELECT column1, column7, column9 FROM testTable GROUP BY column1, column7, column9 ORDER BY column1, "
+ "column7, column9 LIMIT 10";
BrokerResponseNative brokerResponseNative = getBrokerResponse(query);
ResultTable resultTable = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 400000L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 1200000L);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
assertNotNull(brokerResponseNative.getExceptions());
assertEquals(brokerResponseNative.getExceptions().size(), 0);
assertEquals(resultTable.getDataSchema(),
new DataSchema(new String[]{"column1", "column7", "column9"}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT
}));
List<Object[]> resultRows1 = resultTable.getRows();
int previousVal = -1;
for (Object[] resultRow : resultRows1) {
assertEquals(resultRow.length, 3);
assertTrue((int) resultRow[0] >= previousVal);
previousVal = (int) resultRow[0];
}
changePropertiesAndReloadSegment();
brokerResponseNative = getBrokerResponse(query);
resultTable = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 400000L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 1200000L);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
assertNotNull(brokerResponseNative.getExceptions());
assertEquals(brokerResponseNative.getExceptions().size(), 0);
assertEquals(resultTable.getDataSchema(),
new DataSchema(new String[]{"column1", "column7", "column9"}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.INT
}));
List<Object[]> resultRows2 = resultTable.getRows();
previousVal = -1;
for (Object[] resultRow : resultRows2) {
assertEquals(resultRow.length, 3);
assertTrue((int) resultRow[0] >= previousVal);
previousVal = (int) resultRow[0];
}
validateBeforeAfterQueryResults(resultRows1, resultRows2);
}
@Test
public void testAllSelectAggregations()
throws Exception {
String query =
"SELECT MAX(column1), MIN(column1), MAX(column2), MIN(column2), MAXMV(column6), MINMV(column6), MAXMV"
+ "(column7), MINMV(column7), MAX(column9), MIN(column9), MAX(column10), MIN(column10) FROM testTable";
BrokerResponseNative brokerResponseNative = getBrokerResponse(query);
assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
ResultTable resultTable = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 0);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
assertNotNull(brokerResponseNative.getExceptions());
assertEquals(brokerResponseNative.getExceptions().size(), 0);
assertEquals(resultTable.getDataSchema(), new DataSchema(new String[]{
"max(column1)", "min(column1)",
"max" + "(column2)", "min(column2)", "maxmv(column6)", "minmv(column6)",
"maxmv" + "(column7)", "minmv(column7)", "max(column9)", "min(column9)", "max(column10)", "min(column10)"
}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE,
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE,
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE,
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE
}));
List<Object[]> beforeResultRows = resultTable.getRows();
changePropertiesAndReloadSegment();
brokerResponseNative = getBrokerResponse(query);
assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
resultTable = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 0);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
assertNotNull(brokerResponseNative.getExceptions());
assertEquals(brokerResponseNative.getExceptions().size(), 0);
assertEquals(resultTable.getDataSchema(), new DataSchema(new String[]{
"max(column1)", "min(column1)",
"max" + "(column2)", "min(column2)", "maxmv(column6)", "minmv(column6)",
"maxmv" + "(column7)", "minmv(column7)", "max(column9)", "min(column9)", "max(column10)", "min(column10)"
}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE,
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE,
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE,
DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE
}));
List<Object[]> afterResultRows = resultTable.getRows();
validateBeforeAfterQueryResults(beforeResultRows, afterResultRows);
}
  @Test
  public void testMaxArrayLengthAggregation()
      throws Exception {
    // Verifies MAX(ARRAYLENGTH(...)) over the two multi-value columns (column7,
    // column6) returns identical results and query stats before and after
    // changePropertiesAndReloadSegment().
    // TEST1 - Before Reload: Test for column7.
    String query1 = "SELECT MAX(ARRAYLENGTH(column7)) from testTable LIMIT 10";
    BrokerResponseNative brokerResponseNative = getBrokerResponse(query1);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    ResultTable resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 400000);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(), new DataSchema(new String[]{"max(arraylength(column7))"},
        new DataSchema.ColumnDataType[]{DataSchema.ColumnDataType.DOUBLE}));
    List<Object[]> beforeResultRows1 = resultTable.getRows();
    // TEST2 - Before Reload: Test for column6.
    String query2 = "SELECT MAX(ARRAYLENGTH(column6)) from testTable LIMIT 10";
    brokerResponseNative = getBrokerResponse(query2);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 400000);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(), new DataSchema(new String[]{"max(arraylength(column6))"},
        new DataSchema.ColumnDataType[]{DataSchema.ColumnDataType.DOUBLE}));
    List<Object[]> beforeResultRows2 = resultTable.getRows();
    // Swap in the new index configuration; all stats below must match the
    // before-reload values exactly.
    changePropertiesAndReloadSegment();
    // TEST1 - After Reload: Test for column7.
    brokerResponseNative = getBrokerResponse(query1);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 400000);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(), new DataSchema(new String[]{"max(arraylength(column7))"},
        new DataSchema.ColumnDataType[]{DataSchema.ColumnDataType.DOUBLE}));
    List<Object[]> afterResultRows1 = resultTable.getRows();
    validateBeforeAfterQueryResults(beforeResultRows1, afterResultRows1);
    // TEST2 - After Reload: Test for column6.
    brokerResponseNative = getBrokerResponse(query2);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 400_000L);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 400000);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(), new DataSchema(new String[]{"max(arraylength(column6))"},
        new DataSchema.ColumnDataType[]{DataSchema.ColumnDataType.DOUBLE}));
    List<Object[]> afterResultRows2 = resultTable.getRows();
    validateBeforeAfterQueryResults(beforeResultRows2, afterResultRows2);
  }
  @Test
  public void testSelectWithAggregationQueries()
      throws Exception {
    // Verifies filtered GROUP BY aggregations before and after the segment
    // reload. Note the expected filter-scan counts: query1's
    // NumEntriesScannedInFilter drops from 536360 to 0 after reload (the new
    // inverted index on column7 answers the filter without scanning), while
    // query2's stays 426752 (column6 has no inverted index).
    // TEST1 - Before Reload: Test where column7 is in filter.
    String query1 = "SET \"timeoutMs\" = 30000; SELECT column1, max(column1), sum(column10) from testTable WHERE "
        + "column7 = 2147483647 GROUP BY column1 ORDER BY column1";
    BrokerResponseNative brokerResponseNative = getBrokerResponse(query1);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    ResultTable resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 199_756L);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 399_512L);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 536360L);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(),
        new DataSchema(new String[]{"column1", "max(column1)", "sum(column10)"}, new DataSchema.ColumnDataType[]{
            DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE
        }));
    List<Object[]> beforeResultRows1 = resultTable.getRows();
    // TEST2 - Before Reload: Test where column6 is in filter.
    String query2 = "SELECT column1, max(column1), sum(column10) from testTable WHERE column6 = 1001 GROUP BY "
        + "column1 ORDER BY column1";
    brokerResponseNative = getBrokerResponse(query2);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 8);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 16L);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 426752L);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(),
        new DataSchema(new String[]{"column1", "max(column1)", "sum(column10)"}, new DataSchema.ColumnDataType[]{
            DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE
        }));
    List<Object[]> beforeResultRows2 = resultTable.getRows();
    changePropertiesAndReloadSegment();
    // TEST1 - After reload. Test where column7 is in filter.
    brokerResponseNative = getBrokerResponse(query1);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 10);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 199_756L);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 399_512L);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(),
        new DataSchema(new String[]{"column1", "max(column1)", "sum(column10)"}, new DataSchema.ColumnDataType[]{
            DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE
        }));
    List<Object[]> afterResultRows1 = resultTable.getRows();
    validateBeforeAfterQueryResults(beforeResultRows1, afterResultRows1);
    // TEST2 - After Reload: Test where column6 is in filter.
    brokerResponseNative = getBrokerResponse(query2);
    assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
    resultTable = brokerResponseNative.getResultTable();
    assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
    assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
    assertEquals(brokerResponseNative.getNumDocsScanned(), 8);
    assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
    assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
    assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 16L);
    assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 426752);
    assertNotNull(brokerResponseNative.getExceptions());
    assertEquals(brokerResponseNative.getExceptions().size(), 0);
    assertEquals(resultTable.getDataSchema(),
        new DataSchema(new String[]{"column1", "max(column1)", "sum(column10)"}, new DataSchema.ColumnDataType[]{
            DataSchema.ColumnDataType.INT, DataSchema.ColumnDataType.DOUBLE, DataSchema.ColumnDataType.DOUBLE
        }));
    List<Object[]> afterResultRows2 = resultTable.getRows();
    validateBeforeAfterQueryResults(beforeResultRows2, afterResultRows2);
  }
@Test
public void testRangeIndexAfterReload()
throws Exception {
String query = "select count(*) from testTable where column10 > 674022574 and column9 < 674022574";
BrokerResponseNative brokerResponseNative = getBrokerResponse(query);
assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
ResultTable resultTable1 = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 40224L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 0L);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 479412L);
assertNotNull(brokerResponseNative.getExceptions());
DataSchema dataSchema = new DataSchema(new String[]{
"count(*)"
}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.LONG
});
assertEquals(resultTable1.getDataSchema(), dataSchema);
List<Object[]> resultRows1 = resultTable1.getRows();
changePropertiesAndReloadSegment();
brokerResponseNative = getBrokerResponse(query);
assertTrue(brokerResponseNative.getExceptions() == null || brokerResponseNative.getExceptions().size() == 0);
resultTable1 = brokerResponseNative.getResultTable();
assertEquals(brokerResponseNative.getNumRowsResultSet(), 1);
assertEquals(brokerResponseNative.getTotalDocs(), 400_000L);
assertEquals(brokerResponseNative.getNumDocsScanned(), 40224L);
assertEquals(brokerResponseNative.getNumSegmentsProcessed(), 4L);
assertEquals(brokerResponseNative.getNumSegmentsMatched(), 4L);
assertEquals(brokerResponseNative.getNumEntriesScannedPostFilter(), 0L);
assertEquals(brokerResponseNative.getNumEntriesScannedInFilter(), 0L);
assertNotNull(brokerResponseNative.getExceptions());
dataSchema = new DataSchema(new String[]{
"count(*)"
}, new DataSchema.ColumnDataType[]{
DataSchema.ColumnDataType.LONG
});
assertEquals(resultTable1.getDataSchema(), dataSchema);
List<Object[]> resultRows2 = resultTable1.getRows();
validateBeforeAfterQueryResults(resultRows1, resultRows2);
}
/**
* As a part of segmentReload, the ForwardIndexHandler will perform the following operations:
*
* column1 -> change compression.
* column6 -> disable dictionary
* column9 -> disable dictionary
* column3 -> Enable dictionary.
* column2 -> Enable dictionary. Add inverted index.
* column7 -> Enable dictionary. Add inverted index.
* column10 -> Enable dictionary.
*/
private void changePropertiesAndReloadSegment()
throws Exception {
List<String> noDictionaryColumns = List.of("column1", "column5", "column6", "column9");
List<String> invertedIndexColumns = List.of("column2", "column7", "column8");
List<String> rangeIndexColumns = List.of("column9", "column10");
List<FieldConfig> fieldConfigs = new ArrayList<>(noDictionaryColumns.size());
for (String column : noDictionaryColumns) {
FieldConfig.CompressionCodec compressionCodec = FieldConfig.CompressionCodec.SNAPPY;
if (column.equals("column1")) {
compressionCodec = FieldConfig.CompressionCodec.ZSTANDARD;
}
fieldConfigs.add(new FieldConfig(column, FieldConfig.EncodingType.RAW, List.of(), compressionCodec, null));
}
TableConfig tableConfig =
createTableConfig(noDictionaryColumns, invertedIndexColumns, rangeIndexColumns, fieldConfigs);
IndexLoadingConfig indexLoadingConfig = new IndexLoadingConfig(tableConfig, SCHEMA);
// Reload the segments to pick up the new configs
File indexDir = new File(INDEX_DIR, SEGMENT_NAME);
ImmutableSegment segment = ImmutableSegmentLoader.load(indexDir, indexLoadingConfig);
_indexSegment.destroy();
_indexSegment = segment;
_indexSegments = List.of(segment, segment);
Map<String, ColumnMetadata> columnMetadataMap = segment.getSegmentMetadata().getColumnMetadataMap();
for (Map.Entry<String, ColumnMetadata> entry : columnMetadataMap.entrySet()) {
String column = entry.getKey();
ColumnMetadata metadata = entry.getValue();
assertNotNull(segment.getForwardIndex(column));
if (noDictionaryColumns.contains(column)) {
assertFalse(metadata.hasDictionary());
assertNull(segment.getDictionary(column));
} else {
assertTrue(metadata.hasDictionary());
assertNotNull(segment.getDictionary(column));
}
if (invertedIndexColumns.contains(column)) {
assertNotNull(segment.getInvertedIndex(column));
}
}
}
}
// ==== dataset-concatenation boundary (metadata, not valid Java): previous record ends above.
// ==== Next record: googleapis/google-cloud-java — java-api-gateway/proto-google-cloud-api-gateway-v1/src/main/java/com/google/cloud/apigateway/v1/CreateApiConfigRequest.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigateway/v1/apigateway.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigateway.v1;
/**
*
*
* <pre>
* Request message for ApiGatewayService.CreateApiConfig
* </pre>
*
* Protobuf type {@code google.cloud.apigateway.v1.CreateApiConfigRequest}
*/
public final class CreateApiConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigateway.v1.CreateApiConfigRequest)
CreateApiConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use CreateApiConfigRequest.newBuilder() to construct.
  // Builder-based constructor invoked by the generated Builder's build() path.
  private CreateApiConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: string fields start as empty strings.
  private CreateApiConfigRequest() {
    parent_ = "";
    apiConfigId_ = "";
  }
  // Allocation hook used reflectively by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateApiConfigRequest();
  }
  // Returns this message type's descriptor from the generated file descriptor (Apigateway).
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.apigateway.v1.Apigateway
        .internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_descriptor;
  }
  // Binds the generated field-accessor table to this message and its Builder class.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.apigateway.v1.Apigateway
        .internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.apigateway.v1.CreateApiConfigRequest.class,
            com.google.cloud.apigateway.v1.CreateApiConfigRequest.Builder.class);
  }
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Parent resource of the API Config, of the form:
   * `projects/*/locations/global/apis/*`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field holds a ByteString until first read: decode once and cache the String back.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Parent resource of the API Config, of the form:
   * `projects/*/locations/global/apis/*`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Field holds a String: UTF-8 encode once and cache the ByteString back.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int API_CONFIG_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object apiConfigId_ = "";
  /**
   *
   *
   * <pre>
   * Required. Identifier to assign to the API Config. Must be unique within scope of
   * the parent resource.
   * </pre>
   *
   * <code>string api_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The apiConfigId.
   */
  @java.lang.Override
  public java.lang.String getApiConfigId() {
    java.lang.Object ref = apiConfigId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field holds a ByteString until first read: decode once and cache the String back.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      apiConfigId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Identifier to assign to the API Config. Must be unique within scope of
   * the parent resource.
   * </pre>
   *
   * <code>string api_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for apiConfigId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getApiConfigIdBytes() {
    java.lang.Object ref = apiConfigId_;
    if (ref instanceof java.lang.String) {
      // Field holds a String: UTF-8 encode once and cache the ByteString back.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      apiConfigId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int API_CONFIG_FIELD_NUMBER = 3;
private com.google.cloud.apigateway.v1.ApiConfig apiConfig_;
  /**
   *
   *
   * <pre>
   * Required. API resource.
   * </pre>
   *
   * <code>
   * .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the apiConfig field is set.
   */
  @java.lang.Override
  public boolean hasApiConfig() {
    // Bit 0x00000001 of bitField0_ tracks presence of the api_config message field.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. API resource.
   * </pre>
   *
   * <code>
   * .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The apiConfig.
   */
  @java.lang.Override
  public com.google.cloud.apigateway.v1.ApiConfig getApiConfig() {
    // Unset message fields are reported as the type's default instance, never null.
    return apiConfig_ == null
        ? com.google.cloud.apigateway.v1.ApiConfig.getDefaultInstance()
        : apiConfig_;
  }
  /**
   *
   *
   * <pre>
   * Required. API resource.
   * </pre>
   *
   * <code>
   * .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.apigateway.v1.ApiConfigOrBuilder getApiConfigOrBuilder() {
    // Same default-instance fallback as getApiConfig(), exposed via the OrBuilder interface.
    return apiConfig_ == null
        ? com.google.cloud.apigateway.v1.ApiConfig.getDefaultInstance()
        : apiConfig_;
  }
private byte memoizedIsInitialized = -1;
  // Memoized in memoizedIsInitialized: -1 = not yet computed, 1 = initialized,
  // 0 = not initialized. This message always reports initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: parent (omitted when empty).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    // Field 2: api_config_id (omitted when empty).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(apiConfigId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, apiConfigId_);
    }
    // Field 3: api_config (written only when the presence bit is set).
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getApiConfig());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized in memoizedSize (-1 means not yet computed); mirrors writeTo().
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(apiConfigId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, apiConfigId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getApiConfig());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality: parent, api_config_id, api_config (presence + value), unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.apigateway.v1.CreateApiConfigRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.apigateway.v1.CreateApiConfigRequest other =
        (com.google.cloud.apigateway.v1.CreateApiConfigRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!getApiConfigId().equals(other.getApiConfigId())) return false;
    if (hasApiConfig() != other.hasApiConfig()) return false;
    if (hasApiConfig()) {
      if (!getApiConfig().equals(other.getApiConfig())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields equals() compares, memoized in memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + API_CONFIG_ID_FIELD_NUMBER;
    hash = (53 * hash) + getApiConfigId().hashCode();
    if (hasApiConfig()) {
      hash = (37 * hash) + API_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getApiConfig().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.apigateway.v1.CreateApiConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ApiGatewayService.CreateApiConfig
* </pre>
*
* Protobuf type {@code google.cloud.apigateway.v1.CreateApiConfigRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apigateway.v1.CreateApiConfigRequest)
com.google.cloud.apigateway.v1.CreateApiConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigateway.v1.CreateApiConfigRequest.class,
com.google.cloud.apigateway.v1.CreateApiConfigRequest.Builder.class);
}
// Construct using com.google.cloud.apigateway.v1.CreateApiConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getApiConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
apiConfigId_ = "";
apiConfig_ = null;
if (apiConfigBuilder_ != null) {
apiConfigBuilder_.dispose();
apiConfigBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apigateway.v1.Apigateway
.internal_static_google_cloud_apigateway_v1_CreateApiConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.apigateway.v1.CreateApiConfigRequest getDefaultInstanceForType() {
return com.google.cloud.apigateway.v1.CreateApiConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apigateway.v1.CreateApiConfigRequest build() {
com.google.cloud.apigateway.v1.CreateApiConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apigateway.v1.CreateApiConfigRequest buildPartial() {
com.google.cloud.apigateway.v1.CreateApiConfigRequest result =
new com.google.cloud.apigateway.v1.CreateApiConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.apigateway.v1.CreateApiConfigRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.apiConfigId_ = apiConfigId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.apiConfig_ = apiConfigBuilder_ == null ? apiConfig_ : apiConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apigateway.v1.CreateApiConfigRequest) {
return mergeFrom((com.google.cloud.apigateway.v1.CreateApiConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apigateway.v1.CreateApiConfigRequest other) {
if (other == com.google.cloud.apigateway.v1.CreateApiConfigRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getApiConfigId().isEmpty()) {
apiConfigId_ = other.apiConfigId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasApiConfig()) {
mergeApiConfig(other.getApiConfig());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
apiConfigId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getApiConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent resource of the API Config, of the form:
* `projects/*/locations/global/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Parent resource of the API Config, of the form:
* `projects/*/locations/global/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Parent resource of the API Config, of the form:
* `projects/*/locations/global/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent resource of the API Config, of the form:
* `projects/*/locations/global/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Parent resource of the API Config, of the form:
* `projects/*/locations/global/apis/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object apiConfigId_ = "";
/**
*
*
* <pre>
* Required. Identifier to assign to the API Config. Must be unique within scope of
* the parent resource.
* </pre>
*
* <code>string api_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The apiConfigId.
*/
public java.lang.String getApiConfigId() {
java.lang.Object ref = apiConfigId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
apiConfigId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Identifier to assign to the API Config. Must be unique within scope of
* the parent resource.
* </pre>
*
* <code>string api_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for apiConfigId.
*/
public com.google.protobuf.ByteString getApiConfigIdBytes() {
java.lang.Object ref = apiConfigId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
apiConfigId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Identifier to assign to the API Config. Must be unique within scope of
* the parent resource.
* </pre>
*
* <code>string api_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The apiConfigId to set.
* @return This builder for chaining.
*/
public Builder setApiConfigId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
apiConfigId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Identifier to assign to the API Config. Must be unique within scope of
* the parent resource.
* </pre>
*
* <code>string api_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearApiConfigId() {
apiConfigId_ = getDefaultInstance().getApiConfigId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Identifier to assign to the API Config. Must be unique within scope of
* the parent resource.
* </pre>
*
* <code>string api_config_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for apiConfigId to set.
* @return This builder for chaining.
*/
public Builder setApiConfigIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
apiConfigId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.apigateway.v1.ApiConfig apiConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigateway.v1.ApiConfig,
com.google.cloud.apigateway.v1.ApiConfig.Builder,
com.google.cloud.apigateway.v1.ApiConfigOrBuilder>
apiConfigBuilder_;
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the apiConfig field is set.
*/
public boolean hasApiConfig() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The apiConfig.
*/
public com.google.cloud.apigateway.v1.ApiConfig getApiConfig() {
if (apiConfigBuilder_ == null) {
return apiConfig_ == null
? com.google.cloud.apigateway.v1.ApiConfig.getDefaultInstance()
: apiConfig_;
} else {
return apiConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setApiConfig(com.google.cloud.apigateway.v1.ApiConfig value) {
if (apiConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
apiConfig_ = value;
} else {
apiConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setApiConfig(com.google.cloud.apigateway.v1.ApiConfig.Builder builderForValue) {
if (apiConfigBuilder_ == null) {
apiConfig_ = builderForValue.build();
} else {
apiConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeApiConfig(com.google.cloud.apigateway.v1.ApiConfig value) {
if (apiConfigBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& apiConfig_ != null
&& apiConfig_ != com.google.cloud.apigateway.v1.ApiConfig.getDefaultInstance()) {
getApiConfigBuilder().mergeFrom(value);
} else {
apiConfig_ = value;
}
} else {
apiConfigBuilder_.mergeFrom(value);
}
if (apiConfig_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearApiConfig() {
bitField0_ = (bitField0_ & ~0x00000004);
apiConfig_ = null;
if (apiConfigBuilder_ != null) {
apiConfigBuilder_.dispose();
apiConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.apigateway.v1.ApiConfig.Builder getApiConfigBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getApiConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.apigateway.v1.ApiConfigOrBuilder getApiConfigOrBuilder() {
if (apiConfigBuilder_ != null) {
return apiConfigBuilder_.getMessageOrBuilder();
} else {
return apiConfig_ == null
? com.google.cloud.apigateway.v1.ApiConfig.getDefaultInstance()
: apiConfig_;
}
}
/**
*
*
* <pre>
* Required. API resource.
* </pre>
*
* <code>
* .google.cloud.apigateway.v1.ApiConfig api_config = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigateway.v1.ApiConfig,
com.google.cloud.apigateway.v1.ApiConfig.Builder,
com.google.cloud.apigateway.v1.ApiConfigOrBuilder>
getApiConfigFieldBuilder() {
if (apiConfigBuilder_ == null) {
apiConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.apigateway.v1.ApiConfig,
com.google.cloud.apigateway.v1.ApiConfig.Builder,
com.google.cloud.apigateway.v1.ApiConfigOrBuilder>(
getApiConfig(), getParentForChildren(), isClean());
apiConfig_ = null;
}
return apiConfigBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apigateway.v1.CreateApiConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigateway.v1.CreateApiConfigRequest)
private static final com.google.cloud.apigateway.v1.CreateApiConfigRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apigateway.v1.CreateApiConfigRequest();
}
public static com.google.cloud.apigateway.v1.CreateApiConfigRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateApiConfigRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateApiConfigRequest>() {
@java.lang.Override
public CreateApiConfigRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateApiConfigRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateApiConfigRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apigateway.v1.CreateApiConfigRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 36,521 | substratevm/src/com.oracle.svm.jdwp.resident/src/com/oracle/svm/jdwp/resident/ObjectIdMap.java | /*
* Copyright (c) 2023, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.svm.jdwp.resident;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import jdk.internal.misc.Unsafe;
import jdk.graal.compiler.core.common.SuppressFBWarnings;
import org.graalvm.nativeimage.ImageSingletons;
/**
* Mapping of objects to unique IDs and back. Allows concurrent access and automatically disposes
* mapping of GCed values. IDs are linearly increasing long values.
* <p>
* This is a lock-free implementation. The {@link LockFreeHashMap} accesses the hash table via an
* 'accessFlag', which assures exclusive resize of the table. The resize is done by prepending a
* bigger hash table to the table chain. After resize, new objects are always put to the first and
* the biggest table.
* <p>
* Objects are stored in arrays of {@link HashNode} nodes. The arrays must not be muted to assure
* data consistency. When nodes need to be changed, a new array is created and is safely (CAS)
* replaced in the table index.
* <p>
* Every inserted object gets a unique ID assigned. The ID is generated only after the object's node
* is stored in a table to assure uniqueness of IDs. See
* {@link HashNode#finalizeId(ObjectIdMap.LockFreeHashMap, ObjectIdMap.LockFreeHashMap.HashingTable)}.
*/
public final class ObjectIdMap {
private static final int INITIAL_SIZE_BITS = 10; // The first table has size 2^INITIAL_SIZE_BITS
    // A resize is attempted when the current resizeCount % (table.length/RESIZE_ATTEMPT) == 0
private static final int RESIZE_ATTEMPT = 16;
private volatile LockFreeHashMap map;
private static final long mapOffset = Unsafe.getUnsafe().objectFieldOffset(ObjectIdMap.class, "map");
private static long getObjectArrayByteOffset(long index) {
long offset = Unsafe.getUnsafe().arrayBaseOffset(Object[].class);
int scale = Unsafe.getUnsafe().arrayIndexScale(Object[].class);
try {
return Math.addExact(offset, Math.multiplyExact(index, scale));
} catch (ArithmeticException ex) {
throw new IndexOutOfBoundsException(index);
}
}
    /**
     * Volatile read of {@code array[index]}, so that writes published by other threads via
     * {@link #compareAndSetElement} are observed without locking.
     */
    @SuppressWarnings("unchecked")
    private static <T> T getElementVolatile(T[] array, long index) {
        long arrayByteOffset = getObjectArrayByteOffset(index);
        return (T) Unsafe.getUnsafe().getReferenceVolatile(array, arrayByteOffset);
    }
    /**
     * Atomically replaces {@code array[index]} with {@code newElement} if it still holds
     * {@code existingElement} (reference identity).
     *
     * @return true when the CAS succeeded
     */
    private static boolean compareAndSetElement(Object[] array, long index, Object existingElement, Object newElement) {
        long arrayByteOffset = getObjectArrayByteOffset(index);
        return Unsafe.getUnsafe().compareAndSetReference(array, arrayByteOffset, existingElement, newElement);
    }
    /**
     * Returns the map, creating it lazily on first use. A racing initialization is resolved by a
     * CAS on the 'map' field; the losing thread discards its instance and adopts the winner's.
     */
    private LockFreeHashMap getMap() {
        LockFreeHashMap theMap = map;
        if (theMap == null) {
            // We have no map yet
            theMap = new LockFreeHashMap();
            // compareAndExchange returns the witness value: null means our instance won the race.
            LockFreeHashMap oldMap = (LockFreeHashMap) Unsafe.getUnsafe().compareAndExchangeReference(this, mapOffset, null, theMap);
            if (oldMap != null) {
                // It was set already
                theMap = oldMap;
            }
        }
        return theMap;
    }
/**
* Returns the ID, or -1 when the object is not tracked.
*/
public long getIdExisting(Object obj) {
if (obj == null) {
return 0;
}
return getMap().getIdExisting(obj);
}
    /**
     * Returns the ID of {@code obj}, registering it with a weakly-referenced mapping when it is
     * not tracked yet.
     */
    public long getIdOrCreateWeak(Object obj) {
        return getMap().getIdOrCreateWeak(obj);
    }
public Object getObject(long id) {
if (id == 0) {
return null;
}
return getMap().getObject(id);
}
    /**
     * Decreases the hold count by one, replaces HashNodeStrong with HashNodeWeak when holdCount is
     * zero.
     *
     * @param id The object id
     * @return true when the object reference exists.
     */
    public boolean enableCollection(long id) {
        // Single-step decrement; the ID itself stays registered (disposeIfNotHold = false).
        return enableCollection(id, 1, false);
    }
    /**
     * Decreases the hold count by {@code refCount} and when holdCount is zero then either
     * dispose the node, or replace HashNodeStrong with HashNodeWeak.
     *
     * @param id The object id
     * @param refCount the count to decrease the hold count by.
     * @param disposeIfNotHold whether to dispose the object ID when hold count decrements to zero.
     * @return true when the object reference existed.
     */
    public boolean enableCollection(long id, int refCount, boolean disposeIfNotHold) {
        return getMap().enableCollection(id, refCount, disposeIfNotHold);
    }
    /**
     * Increases the hold count by one, replaces HashNodeWeak with HashNodeStrong when holdCount is
     * zero.
     *
     * @param id The object id
     * @return true when the object reference exists.
     */
    public boolean disableCollection(long id) {
        return getMap().disableCollection(id);
    }
    /**
     * Check if the object was collected.
     *
     * @param id The object id.
     * @return <code>true</code> when the object was collected, <code>false</code> when the object
     *         still exists in memory, <code>null</code> when the id never referenced an object.
     */
    @SuppressFBWarnings(value = "NP_BOOLEAN_RETURN_NULL", justification = "Intentional.")
    public Boolean isCollected(long id) {
        return getMap().isCollected(id);
    }
    /**
     * Atomically detaches the current map so that all tracked IDs are forgotten. The detached
     * map's cleanup machinery is shut down via {@link LockFreeHashMap#reset()}.
     */
    public void reset() {
        LockFreeHashMap theMap;
        do {
            theMap = map;
            if (theMap == null) {
                // Nothing was ever created, nothing to reset.
                return;
            }
            // CAS loop: retry when another thread swapped the map between the read and the CAS.
        } while (!Unsafe.getUnsafe().compareAndSetReference(this, mapOffset, theMap, null));
        // Reset 'theMap', it will not be used any more
        theMap.reset();
    }
public <T> T toObject(long objectId, Class<T> targetClass) {
Object object = getObject(objectId);
return targetClass.cast(object);
}
    /** Convenience alias for {@link #getIdOrCreateWeak(Object)}. */
    public long toId(Object object) {
        return getIdOrCreateWeak(object);
    }
private static class LockFreeHashMap {
private volatile TableAccessFlag accessFlag;
private static final long accessFlagOffset = Unsafe.getUnsafe().objectFieldOffset(LockFreeHashMap.class, "accessFlag");
private final ReferenceQueue<Object> refQueue = new ReferenceQueue<>();
private volatile Thread refQueueThread;
private volatile long lastId;
private static final long lastIdOffset = Unsafe.getUnsafe().objectFieldOffset(LockFreeHashMap.class, "lastId");
        /** Tables and the cleanup thread are created lazily on first access, see getTableAccess(). */
        LockFreeHashMap() {
        }
long getNextId() {
long id = lastId;
do {
long witnessId = Unsafe.getUnsafe().compareAndExchangeLong(this, lastIdOffset, id, id + 1);
if (witnessId != id) {
// Try again
id = witnessId;
} else {
// id + 1 was written successfully
return id + 1;
}
} while (true);
}
        /**
         * Starts a daemon platform thread that drains {@code refQueue}: every dequeued weak
         * reference is a {@code HashNodeWeak} whose referent was collected, and its node is
         * disposed from the tables. The thread terminates when interrupted (see {@link #reset()}).
         */
        private Thread startCleanupThread() {
            Thread queueThread = Thread.ofPlatform().name("JDWP Object map cleanup queue").unstarted(() -> {
                while (true) {
                    try {
                        Reference<?> ref = refQueue.remove();
                        // Only HashNodeWeak instances are ever registered with refQueue.
                        HashNode node = (HashNodeWeak) ref;
                        dispose(node);
                    } catch (InterruptedException ex) {
                        // reset() interrupts this thread to shut the cleanup down.
                        break;
                    }
                }
            });
            if (ImageSingletons.contains(ThreadStartDeathSupport.class)) {
                // Register the thread so the debugger infrastructure can filter it out.
                ThreadStartDeathSupport.get().setDebuggerThreadObjectQueue(queueThread);
            }
            queueThread.setDaemon(true);
            queueThread.start();
            return queueThread;
        }
void reset() {
Thread queueThread = refQueueThread;
if (queueThread != null) {
queueThread.interrupt();
if (ImageSingletons.contains(ThreadStartDeathSupport.class)) {
ThreadStartDeathSupport.get().setDebuggerThreadObjectQueue(null);
}
}
// GC of this object and the ReferenceQueue will make all its elements eligible for GC.
if (queueThread != null) {
try { // The queue thread should finish eventually.
queueThread.join();
} catch (InterruptedException e) {
// The join was interrupted, we give up
}
}
}
        /**
         * Returns the access flag holding the current (first, biggest) hashing table, creating
         * the initial table lazily. The thread that wins the initialization CAS also starts the
         * reference-queue cleanup thread, exactly once.
         */
        private TableAccessFlag getTableAccess() {
            TableAccessFlag flag = accessFlag;
            if (flag == null) {
                // No table was created yet, create the first one:
                HashingTable table = new HashingTable(1 << INITIAL_SIZE_BITS, null);
                flag = new TableAccessFlag(0, table);
                TableAccessFlag oldFlag = (TableAccessFlag) Unsafe.getUnsafe().compareAndExchangeReference(this, accessFlagOffset, null, flag);
                if (oldFlag == null) {
                    // We have successfully set the first table
                    refQueueThread = startCleanupThread();
                } else {
                    // It was set already
                    flag = oldFlag;
                }
            }
            return flag;
        }
        /** Atomically swaps the access flag; returns false when another thread changed it first. */
        private boolean setNewTableAccess(TableAccessFlag oldFlag, TableAccessFlag newFlag) {
            return Unsafe.getUnsafe().compareAndSetReference(this, accessFlagOffset, oldFlag, newFlag);
        }
        /**
         * Returns the ID, or -1 when the object is not tracked. Returns 0 for {@code null}.
         */
        public long getIdExisting(Object obj) {
            if (obj == null) {
                return 0;
            }
            TableAccessFlag flag = accessFlag;
            if (flag == null) {
                return -1; // Have no tables yet
            }
            HashingTable table = flag.table();
            // Identity hash: the map tracks objects by reference, not by equals().
            int hash = System.identityHashCode(obj);
            HashNode node = getIdExisting(table, obj, hash);
            if (node != null) {
                return node.getId();
            } else {
                return -1;
            }
        }
private static HashNode getIdExisting(HashingTable table, Object obj, int hash) {
for (HashingTable t = table; t != null; t = t.getNext()) {
HashNode node = t.getIdExisting(obj, hash);
if (node != null) {
return node;
}
}
return null;
}
        /**
         * Returns the ID of {@code obj}, inserting a new weakly-referenced node when absent.
         * Lock-free: the loop retries on a lost resize race, a lost resize-count update, or when
         * the table chain was resized between the node's insertion and its ID assignment.
         */
        public long getIdOrCreateWeak(Object obj) {
            long id = getIdExisting(obj);
            if (id != -1) {
                return id;
            }
            int hash = System.identityHashCode(obj);
            HashNode node = null;
            // Out-parameter: set to true by HashingTable when a brand-new node was inserted.
            boolean[] needsFinalizeId = new boolean[]{false};
            do {
                TableAccessFlag tableAccess = getTableAccess();
                HashingTable table = tableAccess.table();
                boolean needsResize = table.needsResize();
                if (needsResize) {
                    int hashTableLength = table.hashToObjectTable.length;
                    // Only every (length/RESIZE_ATTEMPT)-th request actually attempts the resize;
                    // the others just bump the counter below.
                    if (tableAccess.resizeCount % (hashTableLength / RESIZE_ATTEMPT) == 0) {
                        // Let's try to do resize
                        int newSize = table.hashToObjectTable.length << 1;
                        // The new, doubled table is prepended to the chain; old tables stay readable.
                        HashingTable newTable = new HashingTable(newSize, table);
                        TableAccessFlag newTableAccess = new TableAccessFlag(0, newTable);
                        if (!setNewTableAccess(tableAccess, newTableAccess)) {
                            // The resize was not successful
                            continue;
                        } else {
                            // We're resized
                            table = newTable;
                            tableAccess = newTableAccess;
                        }
                    } else {
                        // Just increase the resize request count
                        TableAccessFlag newTableAccess = new TableAccessFlag(accessFlag.resizeCount + 1, table);
                        if (!setNewTableAccess(tableAccess, newTableAccess)) {
                            // Try next time
                            continue;
                        }
                    }
                }
                // Write the value to the table
                node = table.getIdOrCreateWeak(obj, hash, needsFinalizeId);
                if (needsFinalizeId[0]) {
                    // A new node was written to the table.
                    // We need to verify that the table wasn't resized in between
                    if (tableAccess.table == accessFlag.table) {
                        // We wrote it into the current table, great.
                        // Assign a new ID:
                        node.finalizeId(this, table);
                    } else {
                        // The table was resized in between. We can not be sure
                        // whether it wasn't put into the new table already
                        continue;
                    }
                }
                // We have the object's node. We exit the loop if the ID is set.
                assert node != null;
            } while (node == null || node.getId() < 0);
            return node.getId();
        }
/**
 * Returns the object registered under {@code id}, searching all table generations,
 * or {@code null} when the ID is unknown, represents {@code null}, or the object
 * was already collected.
 */
public Object getObject(long id) {
    if (id == 0) {
        // ID 0 is reserved for the null object.
        return null;
    }
    TableAccessFlag currentAccess = accessFlag;
    if (currentAccess == null) {
        return null; // Nothing was stored yet.
    }
    HashingTable generation = currentAccess.table();
    while (generation != null) {
        Object found = generation.getObject(id);
        if (found != null) {
            return found;
        }
        generation = generation.getNext();
    }
    return null;
}
/** Removes {@code node} from every table generation, if any tables exist. */
private void dispose(HashNode node) {
    TableAccessFlag currentAccess = accessFlag;
    if (currentAccess != null) {
        disposeAll(currentAccess.table(), node);
    }
}
/** Removes {@code node} from {@code table} and every older generation chained behind it. */
private static void disposeAll(HashingTable table, HashNode node) {
    HashingTable current = table;
    while (current != null) {
        current.dispose(node);
        current = current.getNext();
    }
}
/**
 * Releases {@code refCount} holds on the object registered under {@code id}, making it
 * eligible for garbage collection again once its hold count drops to zero.
 *
 * @param id the tracked object's ID
 * @param refCount number of holds to release; must not be negative
 * @param disposeIfNotHold when {@code true}, the tracking entry is removed outright instead
 *        of being downgraded to a weak reference
 * @return {@code true} when an entry with the given ID was found in some table generation
 * @throws IllegalArgumentException when {@code refCount} is negative
 */
boolean enableCollection(long id, int refCount, boolean disposeIfNotHold) {
    if (refCount < 0) {
        throw new IllegalArgumentException("Negative refCount not permitted: " + refCount);
    }
    TableAccessFlag flag = accessFlag;
    if (flag == null) {
        return false; // Have no tables yet
    }
    for (HashingTable table = flag.table(); table != null; table = table.getNext()) {
        // Fixed: local was misspelled "sucess" and redundantly returned; test the call directly.
        if (table.enableCollection(id, refCount, disposeIfNotHold)) {
            return true;
        }
    }
    return false;
}
/**
 * Prevents the object registered under {@code id} from being garbage collected by pinning
 * it behind a strong reference (or incrementing the hold count when already pinned).
 *
 * @param id the tracked object's ID
 * @return {@code true} when the entry was found and the object is still alive
 */
public boolean disableCollection(long id) {
    TableAccessFlag flag = accessFlag;
    if (flag == null) {
        return false; // Have no tables yet
    }
    for (HashingTable table = flag.table(); table != null; table = table.getNext()) {
        // Fixed: local was misspelled "sucess" and redundantly returned; test the call directly.
        if (table.disableCollection(id)) {
            return true;
        }
    }
    return false;
}
/**
 * Tells whether the object once registered under {@code id} has been garbage collected.
 *
 * @return {@code null} when no object was ever registered under {@code id};
 *         otherwise {@code true} iff the object is gone
 */
@SuppressFBWarnings(value = "NP_BOOLEAN_RETURN_NULL", justification = "Intentional.")
public Boolean isCollected(long id) {
    if (id <= 0 || id > lastId) {
        // This ID was never handed out.
        return null;
    }
    return getObject(id) == null;
}
/**
 * This class contains the hashing table that stores the hash -> object mapping and also
 * the ID -> hash code mapping for lookups by the ID.
 * <p>
 * <code>hashToObjectTable</code> is an array indexed by object's system hash code and
 * contains arrays of <code>HashNode</code>. <br>
 * <code>idToHashTable</code> is an array indexed by unique IDs and contains lists of
 * object's hash codes.
 * <p>
 * All mutation is lock-free: bucket chains are immutable arrays/lists replaced wholesale
 * via CAS, so readers never observe a partially updated chain. Tables form a generation
 * chain via {@code next}; lookups fall back to older generations.
 */
final class HashingTable {
    private final HashNode[][] hashToObjectTable; // object table by a hash code index
    private final HashListNode[] idToHashTable; // hash values by ID index
    // The older (smaller) generation this table superseded, or null for the first table.
    private final HashingTable next;
    // Number of nodes currently stored in this generation; maintained via CAS.
    private volatile int size;
    private static final long sizeOffset = Unsafe.getUnsafe().objectFieldOffset(HashingTable.class, "size");
    /**
     * Creates a table whose index arrays have the given capacity.
     *
     * @param size capacity of both arrays (a power of two — indexing uses (size - 1) masks)
     * @param next the previous generation, or {@code null}
     */
    HashingTable(int size, HashingTable next) {
        idToHashTable = new HashListNode[size];
        hashToObjectTable = new HashNode[size][];
        this.next = next;
    }
    /** Returns the previous (older) table generation, or {@code null}. */
    HashingTable getNext() {
        return next;
    }
    /**
     * Finds the node tracking {@code obj} (identity comparison) in this generation only.
     *
     * @return the node, or {@code null} when absent
     */
    HashNode getIdExisting(Object obj, int hash) {
        int index = (hashToObjectTable.length - 1) & hash;
        HashNode[] chain = getElementVolatile(hashToObjectTable, index);
        if (chain != null) {
            for (HashNode node : chain) {
                if (obj == node.getObject()) {
                    return node;
                }
            }
        }
        return null;
    }
    private void incrementSize() {
        changeSize(+1);
    }
    private void decrementSize() {
        changeSize(-1);
    }
    // CAS retry loop adding 'increment' to the volatile size field.
    private void changeSize(int increment) {
        int oldSize = size;
        int s;
        do {
            s = Unsafe.getUnsafe().compareAndExchangeInt(this, sizeOffset, oldSize, oldSize + increment);
            if (s != oldSize) {
                // Try again
                oldSize = s;
            } else {
                break;
            }
        } while (true);
    }
    /**
     * Whether this table is over capacity. The second clause guards against the
     * doubled capacity overflowing int.
     */
    boolean needsResize() {
        return size > hashToObjectTable.length && (hashToObjectTable.length << 1) > 0;
    }
    /**
     * Returns the node for {@code obj}, inserting a fresh weak node when absent.
     * On insertion, {@code needsFinalizeId[0]} is set so the caller assigns the ID
     * (the new node's ID is still -1 at that point).
     */
    private HashNode getIdOrCreateWeak(Object obj, int hash, boolean[] needsFinalizeId) {
        int index = (hashToObjectTable.length - 1) & hash;
        HashNode[] oldChain;
        HashNode[] newChain;
        do {
            oldChain = getElementVolatile(hashToObjectTable, index);
            if (oldChain != null) {
                // Search the old node for the object
                for (HashNode node : oldChain) {
                    if (node.getObject() == obj) {
                        // The node is there already
                        needsFinalizeId[0] = false;
                        return node;
                    }
                }
                // Prepend: copy the old chain shifted by one, new node goes to slot 0.
                newChain = new HashNode[oldChain.length + 1];
                System.arraycopy(oldChain, 0, newChain, 1, oldChain.length);
            } else {
                newChain = new HashNode[1];
            }
            newChain[0] = new HashNodeWeak(hash, obj, refQueue);
        } while (!compareAndSetElement(hashToObjectTable, index, oldChain, newChain));
        incrementSize();
        // A new node with uninitialized ID
        needsFinalizeId[0] = true;
        return newChain[0];
    }
    /**
     * Records a freshly assigned ID in the ID -> hash index so the node can later be
     * looked up by ID.
     */
    void newId(HashNode node) {
        // A node got a new ID assigned.
        // We need to add that to our 'hashes' table.
        long newId = node.getId();
        int hash = node.getHash();
        int hashIndex = (int) ((idToHashTable.length - 1) & newId);
        HashListNode oldList;
        HashListNode newList;
        do {
            oldList = getElementVolatile(idToHashTable, hashIndex);
            newList = new HashListNode(hash, oldList);
        } while (!compareAndSetElement(idToHashTable, hashIndex, oldList, newList));
    }
    /**
     * Looks up the object by ID in this generation: the ID index yields candidate hash
     * codes, whose buckets are then scanned for a node with a matching ID.
     *
     * @return the object, or {@code null} when absent or collected
     */
    public Object getObject(long id) {
        if (id == 0) {
            return null;
        }
        int hashIndex = (int) ((idToHashTable.length - 1) & id);
        HashListNode list = getElementVolatile(idToHashTable, hashIndex);
        while (list != null) {
            int hash = list.hash();
            int index = (hashToObjectTable.length - 1) & hash;
            HashNode[] chain = getElementVolatile(hashToObjectTable, index);
            if (chain != null) {
                for (HashNode node : chain) {
                    if (id == node.getId()) {
                        return node.getObject();
                    }
                }
            }
            list = list.next();
        }
        return null;
    }
    /**
     * Removes the node with {@code node}'s ID from this generation: unlinks it from the
     * hash bucket via CAS, then cleans the ID index and decrements the size.
     */
    private void dispose(HashNode node) {
        long id = node.getId();
        int hash = node.getHash();
        int index = (hashToObjectTable.length - 1) & hash;
        retryLoop: do {
            HashNode[] oldChain = getElementVolatile(hashToObjectTable, index);
            if (oldChain != null) {
                for (int i = 0; i < oldChain.length; i++) {
                    if (oldChain[i].getId() == id) {
                        // Remove 'i' element from the oldChain, forming a newChain
                        HashNode[] newChain;
                        if (oldChain.length == 1) {
                            newChain = null;
                        } else {
                            // Copy chain, skipping the removed node at position 'i'
                            newChain = removeNode(oldChain, i);
                        }
                        if (!compareAndSetElement(hashToObjectTable, index, oldChain, newChain)) {
                            // We failed to write the new chain, try again
                            continue retryLoop;
                        } else {
                            // Successfully removed.
                            // Node with the given ID is in the table just once.
                            disposeId(id, hash);
                            decrementSize();
                            break;
                        }
                    }
                }
            }
            // not found
            break;
        } while (true);
    }
    /** Returns a copy of {@code oldChain} without the element at {@code index}. */
    private static HashNode[] removeNode(HashNode[] oldChain, int index) {
        HashNode[] newChain = new HashNode[oldChain.length - 1];
        if (index > 0) {
            System.arraycopy(oldChain, 0, newChain, 0, index);
        }
        if (index < oldChain.length) {
            System.arraycopy(oldChain, index + 1, newChain, index, newChain.length - index);
        }
        return newChain;
    }
    /** Returns a copy of {@code oldChain} with the element at {@code index} replaced. */
    private static HashNode[] replaceNode(HashNode[] oldChain, int index, HashNode newNode) {
        if (oldChain.length == 1) {
            return new HashNode[]{newNode};
        }
        HashNode[] newChain = new HashNode[oldChain.length];
        // Copy it all for simplicity
        System.arraycopy(oldChain, 0, newChain, 0, oldChain.length);
        newChain[index] = newNode;
        return newChain;
    }
    /**
     * Removes the (id -> hash) entry from the ID index. Because the list nodes are
     * immutable, the prefix before the removed node is rebuilt; the inner loop re-finds
    * each predecessor from the head (quadratic, acceptable since chains are short).
     */
    private void disposeId(long id, int hash) {
        int hashIndex = (int) ((idToHashTable.length - 1) & id);
        HashListNode oldList;
        retryLoop: do {
            oldList = getElementVolatile(idToHashTable, hashIndex);
            HashListNode nPrev = null;
            for (HashListNode n = oldList; n != null; nPrev = n, n = n.next()) {
                if (n.hash() == hash) {
                    // Remove 'n' from the chain
                    HashListNode newList;
                    if (nPrev == null) {
                        newList = n.next();
                    } else {
                        HashListNode end = n.next();
                        while (nPrev != null) {
                            HashListNode nn = new HashListNode(nPrev.hash(), end);
                            end = nn;
                            HashListNode lastPrev = nPrev;
                            // The chains are short, find the previous
                            nPrev = null;
                            for (HashListNode on = oldList; on != lastPrev; on = on.next()) {
                                nPrev = on;
                            }
                        }
                        newList = end;
                    }
                    if (compareAndSetElement(idToHashTable, hashIndex, oldList, newList)) {
                        // Disposed.
                        return;
                    } else {
                        // We failed to write the new chain, try again
                        continue retryLoop;
                    }
                }
            }
            // not found
            break;
        } while (true);
    }
    /**
     * Releases {@code refCount} holds on the entry with the given ID in this generation.
     * When the hold count reaches zero, the strong node is either disposed (when
     * {@code disposeIfNotHold}) or downgraded back to a weak node.
     *
     * @return {@code true} when the ID was found in this generation
     */
    boolean enableCollection(long id, int refCount, boolean disposeIfNotHold) {
        int hashIndex = (int) ((idToHashTable.length - 1) & id);
        HashListNode list = getElementVolatile(idToHashTable, hashIndex);
        for (; list != null; list = list.next()) {
            int index = (hashToObjectTable.length - 1) & list.hash();
            retryLoop: do {
                HashNode[] chain = getElementVolatile(hashToObjectTable, index);
                if (chain != null) {
                    for (int i = 0; i < chain.length; i++) {
                        HashNode node = chain[i];
                        if (id == node.getId()) {
                            if (node instanceof HashNodeStrong strongNode) {
                                int holdCount = strongNode.changeHoldCount(-refCount);
                                // MIN_VALUE is changeHoldCount's sentinel for "already at zero".
                                if (holdCount == 0 || holdCount == Integer.MIN_VALUE) {
                                    // The node needs to be replaced with a weak one.
                                    if (disposeIfNotHold) {
                                        disposeAll(this, node);
                                    } else {
                                        // Replace the strong node with a weak one
                                        HashNode weak = new HashNodeWeak(node.getHash(), node.getObject(), refQueue, node.getId());
                                        HashNode[] newChain = replaceNode(chain, i, weak);
                                        if (compareAndSetElement(hashToObjectTable, index, chain, newChain)) {
                                            // We changed the node. We must wipe out any
                                            // occurrences of this ID from other tables
                                            if (this.next != null) {
                                                disposeAll(this.next, node);
                                            }
                                        } else {
                                            continue retryLoop;
                                        }
                                    }
                                }
                            } else if (disposeIfNotHold) {
                                // We're weak already
                                disposeAll(this, node);
                            }
                            return true; // We found the ID
                        }
                    }
                }
                break; // not found
            } while (true);
        }
        return false;
    }
    /**
     * Pins the entry with the given ID in this generation: increments the hold count of a
     * strong node, or upgrades a weak node to a strong one (if not yet collected).
     *
     * @return {@code true} when found and still alive; {@code false} otherwise
     */
    public boolean disableCollection(long id) {
        int hashIndex = (int) ((idToHashTable.length - 1) & id);
        HashListNode list = getElementVolatile(idToHashTable, hashIndex);
        for (; list != null; list = list.next()) {
            int index = (hashToObjectTable.length - 1) & list.hash();
            retryLoop: do {
                HashNode[] chain = getElementVolatile(hashToObjectTable, index);
                if (chain != null) {
                    for (int i = 0; i < chain.length; i++) {
                        HashNode node = chain[i];
                        if (id == node.getId()) {
                            if (node instanceof HashNodeStrong strongNode) {
                                strongNode.changeHoldCount(+1);
                            } else {
                                Object obj = node.getObject();
                                if (obj == null) {
                                    // GCed
                                    return false;
                                }
                                // Replace node with HashNodeStrong
                                HashNode strong = new HashNodeStrong(node.getHash(), obj, node.getId());
                                HashNode[] newChain = replaceNode(chain, i, strong);
                                if (compareAndSetElement(hashToObjectTable, index, chain, newChain)) {
                                    // We changed the node. We must wipe out any occurrences
                                    // of this ID from other tables
                                    if (this.next != null) {
                                        disposeAll(this.next, node);
                                    }
                                } else {
                                    continue retryLoop;
                                }
                            }
                            return true; // We found the ID
                        }
                    }
                }
                break; // not found
            } while (true);
        }
        return false;
    }
}
/**
 * Immutable snapshot pairing the current {@link HashingTable} with the number of resize
 * requests observed since that table was installed; replaced atomically as one unit.
 */
private record TableAccessFlag(int resizeCount, HashingTable table) {
}
}
/**
 * A single tracked-object entry: pairs an object (held weakly or strongly, depending on
 * the implementation) with its identity hash and a unique long ID.
 */
private sealed interface HashNode permits HashNodeWeak, HashNodeStrong {
    /**
     * Get a unique ID, or -1 if it was not assigned yet.
     */
    long getId();
    /**
     * Assign a new unique ID.
     */
    long finalizeId(LockFreeHashMap map, LockFreeHashMap.HashingTable table);
    /**
     * Get the object's hash code.
     */
    int getHash();
    /**
     * Get the object, or {@code null} when collected.
     */
    Object getObject();
}
/**
 * A {@link HashNode} that references its object weakly, so the object can be garbage
 * collected; collected referents are reported via the supplied {@link ReferenceQueue}.
 */
private static final class HashNodeWeak extends WeakReference<Object> implements HashNode {
    // -1 until finalizeId() assigns the real ID; volatile so readers see the assignment.
    private volatile long id;
    private final int hash;
    HashNodeWeak(int hash, Object referent, ReferenceQueue<Object> refQueue) {
        this(hash, referent, refQueue, -1);
    }
    HashNodeWeak(int hash, Object referent, ReferenceQueue<Object> refQueue, long id) {
        super(referent, refQueue);
        this.id = id;
        this.hash = hash;
    }
    @Override
    public long getId() {
        return id;
    }
    /**
     * Assigns a fresh ID (when none was set) and registers it in the table's ID index.
     * Called by the single thread that inserted this node, so a plain write suffices.
     */
    @Override
    public long finalizeId(LockFreeHashMap map, LockFreeHashMap.HashingTable table) {
        long theId = id;
        if (theId <= -1) {
            theId = map.getNextId();
            // Only the node's creator will finalize, pure set is safe
            id = theId;
        }
        table.newId(this);
        return theId;
    }
    @Override
    public int getHash() {
        return hash;
    }
    @Override
    public Object getObject() {
        return get();
    }
}
/**
 * A {@link HashNode} that holds its object with a strong reference, preventing garbage
 * collection while the hold count is positive. Created only as a replacement of an
 * existing weak node, so the ID is always already assigned.
 */
private static final class HashNodeStrong implements HashNode {
    private final long id;
    private final int hash;
    private final Object object;
    // Number of outstanding holds; starts at 1 and is adjusted via CAS.
    private volatile int holdCount = 1;
    private static final long holdCountOffset = Unsafe.getUnsafe().objectFieldOffset(HashNodeStrong.class, "holdCount");
    HashNodeStrong(int hash, Object object, long id) {
        this.id = id;
        this.hash = hash;
        this.object = object;
    }
    @Override
    public long getId() {
        return id;
    }
    @Override
    public long finalizeId(LockFreeHashMap map, LockFreeHashMap.HashingTable table) {
        // The strong nodes are used as a replacement of existing weak nodes,
        // which have the ID initialized already.
        throw new UnsupportedOperationException();
    }
    /**
     * Adjusts the hold count by {@code increment} (clamped at zero) via a CAS loop.
     *
     * @return the new count, or {@code Integer.MIN_VALUE} as a sentinel when the count was
     *         already zero on entry (the node is being replaced with a weak one)
     */
    int changeHoldCount(int increment) {
        int oldCount = holdCount;
        if (oldCount == 0) {
            // Locked when reached 0, the node is replaced with a weak one.
            return Integer.MIN_VALUE;
        }
        int count;
        do {
            int newCount = oldCount + increment;
            if (newCount < 0) {
                newCount = 0;
            }
            count = Unsafe.getUnsafe().compareAndExchangeInt(this, holdCountOffset, oldCount, newCount);
            if (count != oldCount) {
                // Try again
                oldCount = count;
            } else {
                return newCount;
            }
        } while (true);
    }
    @Override
    public int getHash() {
        return hash;
    }
    @Override
    public Object getObject() {
        return object;
    }
}
/** Immutable singly linked list cell holding one hash code in the ID -> hash index. */
private record HashListNode(int hash, HashListNode next) {
}
}
|
googleapis/google-cloud-java | 36,185 | java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/PurgeProductsMetadata.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/purge_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Metadata related to the progress of the PurgeProducts operation.
* This will be returned by the google.longrunning.Operation.metadata field.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.PurgeProductsMetadata}
*/
public final class PurgeProductsMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.PurgeProductsMetadata)
PurgeProductsMetadataOrBuilder {
// Generated message boilerplate: serialization ID and builder-only construction.
private static final long serialVersionUID = 0L;
// Use PurgeProductsMetadata.newBuilder() to construct.
private PurgeProductsMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
private PurgeProductsMetadata() {}
// Generated factory used internally by the protobuf runtime.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new PurgeProductsMetadata();
}
// Generated descriptor/reflection plumbing for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.retail.v2.PurgeConfigProto
      .internal_static_google_cloud_retail_v2_PurgeProductsMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.retail.v2.PurgeConfigProto
      .internal_static_google_cloud_retail_v2_PurgeProductsMetadata_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.retail.v2.PurgeProductsMetadata.class,
          com.google.cloud.retail.v2.PurgeProductsMetadata.Builder.class);
}
// Presence bits for the optional message fields below.
private int bitField0_;
public static final int CREATE_TIME_FIELD_NUMBER = 1;
private com.google.protobuf.Timestamp createTime_;
/**
 *
 *
 * <pre>
 * Operation create time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 1;</code>
 *
 * @return Whether the createTime field is set.
 */
@java.lang.Override
public boolean hasCreateTime() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Operation create time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 1;</code>
 *
 * @return The createTime.
 */
@java.lang.Override
public com.google.protobuf.Timestamp getCreateTime() {
  return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
/**
 *
 *
 * <pre>
 * Operation create time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp create_time = 1;</code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
  return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
public static final int UPDATE_TIME_FIELD_NUMBER = 2;
private com.google.protobuf.Timestamp updateTime_;
/**
 *
 *
 * <pre>
 * Operation last update time. If the operation is done, this is also the
 * finish time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 2;</code>
 *
 * @return Whether the updateTime field is set.
 */
@java.lang.Override
public boolean hasUpdateTime() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Operation last update time. If the operation is done, this is also the
 * finish time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 2;</code>
 *
 * @return The updateTime.
 */
@java.lang.Override
public com.google.protobuf.Timestamp getUpdateTime() {
  return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
/**
 *
 *
 * <pre>
 * Operation last update time. If the operation is done, this is also the
 * finish time.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp update_time = 2;</code>
 */
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
  return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
public static final int SUCCESS_COUNT_FIELD_NUMBER = 3;
private long successCount_ = 0L;
/**
 *
 *
 * <pre>
 * Count of entries that were deleted successfully.
 * </pre>
 *
 * <code>int64 success_count = 3;</code>
 *
 * @return The successCount.
 */
@java.lang.Override
public long getSuccessCount() {
  return successCount_;
}
public static final int FAILURE_COUNT_FIELD_NUMBER = 4;
private long failureCount_ = 0L;
/**
 *
 *
 * <pre>
 * Count of entries that encountered errors while processing.
 * </pre>
 *
 * <code>int64 failure_count = 4;</code>
 *
 * @return The failureCount.
 */
@java.lang.Override
public long getFailureCount() {
  return failureCount_;
}
// Memoized initialization check: -1 unknown, 0 false, 1 true. This message has no
// required fields, so it is always initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order; scalar int64s are skipped when zero
// (proto3 default-value elision).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getCreateTime());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getUpdateTime());
  }
  if (successCount_ != 0L) {
    output.writeInt64(3, successCount_);
  }
  if (failureCount_ != 0L) {
    output.writeInt64(4, failureCount_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the wire size, mirroring the conditions in writeTo().
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCreateTime());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateTime());
  }
  if (successCount_ != 0L) {
    size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, successCount_);
  }
  if (failureCount_ != 0L) {
    size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, failureCount_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Structural equality over all fields, presence bits, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.retail.v2.PurgeProductsMetadata)) {
    return super.equals(obj);
  }
  com.google.cloud.retail.v2.PurgeProductsMetadata other =
      (com.google.cloud.retail.v2.PurgeProductsMetadata) obj;
  if (hasCreateTime() != other.hasCreateTime()) return false;
  if (hasCreateTime()) {
    if (!getCreateTime().equals(other.getCreateTime())) return false;
  }
  if (hasUpdateTime() != other.hasUpdateTime()) return false;
  if (hasUpdateTime()) {
    if (!getUpdateTime().equals(other.getUpdateTime())) return false;
  }
  if (getSuccessCount() != other.getSuccessCount()) return false;
  if (getFailureCount() != other.getFailureCount()) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash over descriptor, set fields, and unknown fields (consistent with equals()).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasCreateTime()) {
    hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getCreateTime().hashCode();
  }
  if (hasUpdateTime()) {
    hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateTime().hashCode();
  }
  hash = (37 * hash) + SUCCESS_COUNT_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSuccessCount());
  hash = (37 * hash) + FAILURE_COUNT_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFailureCount());
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points delegating to PARSER / GeneratedMessageV3 helpers.
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.PurgeProductsMetadata parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods; toBuilder() avoids copying when called on the default instance.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.retail.v2.PurgeProductsMetadata prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Metadata related to the progress of the PurgeProducts operation.
* This will be returned by the google.longrunning.Operation.metadata field.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.PurgeProductsMetadata}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.PurgeProductsMetadata)
com.google.cloud.retail.v2.PurgeProductsMetadataOrBuilder {
// Generated descriptor/reflection plumbing for the Builder.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.retail.v2.PurgeConfigProto
      .internal_static_google_cloud_retail_v2_PurgeProductsMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.retail.v2.PurgeConfigProto
      .internal_static_google_cloud_retail_v2_PurgeProductsMetadata_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.retail.v2.PurgeProductsMetadata.class,
          com.google.cloud.retail.v2.PurgeProductsMetadata.Builder.class);
}
// Construct using com.google.cloud.retail.v2.PurgeProductsMetadata.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// Eagerly creates nested-message field builders when the runtime requires it.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getCreateTimeFieldBuilder();
    getUpdateTimeFieldBuilder();
  }
}
// Resets every field and presence bit to its default and disposes nested builders.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  createTime_ = null;
  if (createTimeBuilder_ != null) {
    createTimeBuilder_.dispose();
    createTimeBuilder_ = null;
  }
  updateTime_ = null;
  if (updateTimeBuilder_ != null) {
    updateTimeBuilder_.dispose();
    updateTimeBuilder_ = null;
  }
  successCount_ = 0L;
  failureCount_ = 0L;
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.retail.v2.PurgeConfigProto
      .internal_static_google_cloud_retail_v2_PurgeProductsMetadata_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2.PurgeProductsMetadata getDefaultInstanceForType() {
  return com.google.cloud.retail.v2.PurgeProductsMetadata.getDefaultInstance();
}
// build() enforces the initialization contract; buildPartial() does not.
@java.lang.Override
public com.google.cloud.retail.v2.PurgeProductsMetadata build() {
  com.google.cloud.retail.v2.PurgeProductsMetadata result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.cloud.retail.v2.PurgeProductsMetadata buildPartial() {
  com.google.cloud.retail.v2.PurgeProductsMetadata result =
      new com.google.cloud.retail.v2.PurgeProductsMetadata(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Copies set fields from the builder into the result, translating builder presence bits
// (0x1 createTime, 0x2 updateTime, 0x4 successCount, 0x8 failureCount) into message bits.
private void buildPartial0(com.google.cloud.retail.v2.PurgeProductsMetadata result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.successCount_ = successCount_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.failureCount_ = failureCount_;
  }
  result.bitField0_ |= to_bitField0_;
}
// Generated pass-through overrides delegating to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Merge semantics: set fields of 'other' overwrite/merge into this builder;
// unset fields are left untouched.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.retail.v2.PurgeProductsMetadata) {
    return mergeFrom((com.google.cloud.retail.v2.PurgeProductsMetadata) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
public Builder mergeFrom(com.google.cloud.retail.v2.PurgeProductsMetadata other) {
  if (other == com.google.cloud.retail.v2.PurgeProductsMetadata.getDefaultInstance())
    return this;
  if (other.hasCreateTime()) {
    mergeCreateTime(other.getCreateTime());
  }
  if (other.hasUpdateTime()) {
    mergeUpdateTime(other.getUpdateTime());
  }
  if (other.getSuccessCount() != 0L) {
    setSuccessCount(other.getSuccessCount());
  }
  if (other.getFailureCount() != 0L) {
    setFailureCount(other.getFailureCount());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
// No required fields in this message, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Wire-format parser: dispatches on the tag (field number << 3 | wire type);
// 10/18 = fields 1-2 (length-delimited messages), 24/32 = fields 3-4 (varint int64).
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 24:
          {
            successCount_ = input.readInt64();
            bitField0_ |= 0x00000004;
            break;
          } // case 24
        case 32:
          {
            failureCount_ = input.readInt64();
            bitField0_ |= 0x00000008;
            break;
          } // case 32
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
    // Presence bits: 0x1 create_time, 0x2 update_time, 0x4 success_count, 0x8 failure_count.
    private int bitField0_;

    private com.google.protobuf.Timestamp createTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        createTimeBuilder_;

    /**
     * Operation create time. Proto: {@code .google.protobuf.Timestamp create_time = 1}.
     *
     * @return Whether the createTime field is set.
     */
    public boolean hasCreateTime() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     * Operation create time. Proto: {@code .google.protobuf.Timestamp create_time = 1}.
     *
     * @return The createTime, or the Timestamp default instance if unset.
     */
    public com.google.protobuf.Timestamp getCreateTime() {
      if (createTimeBuilder_ == null) {
        return createTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : createTime_;
      } else {
        // Once a nested builder exists, it owns the field's current value.
        return createTimeBuilder_.getMessage();
      }
    }

    /** Sets create_time to the given (non-null) value and marks it present. */
    public Builder setCreateTime(com.google.protobuf.Timestamp value) {
      if (createTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        createTime_ = value;
      } else {
        createTimeBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /** Sets create_time from a Timestamp builder (built immediately). */
    public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
      if (createTimeBuilder_ == null) {
        createTime_ = builderForValue.build();
      } else {
        createTimeBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     * Merges {@code value} into create_time: if a non-default value is already present
     * the two are field-merged, otherwise {@code value} replaces the field.
     */
    public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
      if (createTimeBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && createTime_ != null
            && createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getCreateTimeBuilder().mergeFrom(value);
        } else {
          createTime_ = value;
        }
      } else {
        createTimeBuilder_.mergeFrom(value);
      }
      if (createTime_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /** Clears create_time, disposing any nested builder. */
    public Builder clearCreateTime() {
      bitField0_ = (bitField0_ & ~0x00000001);
      createTime_ = null;
      if (createTimeBuilder_ != null) {
        createTimeBuilder_.dispose();
        createTimeBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /** Returns a mutable builder for create_time, marking the field present. */
    public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getCreateTimeFieldBuilder().getBuilder();
    }

    /** Read-only view of create_time without forcing builder creation. */
    public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
      if (createTimeBuilder_ != null) {
        return createTimeBuilder_.getMessageOrBuilder();
      } else {
        return createTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : createTime_;
      }
    }

    /**
     * Lazily creates the nested SingleFieldBuilderV3 for create_time; after creation the
     * raw field reference is nulled because the builder becomes the source of truth.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getCreateTimeFieldBuilder() {
      if (createTimeBuilder_ == null) {
        createTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getCreateTime(), getParentForChildren(), isClean());
        createTime_ = null;
      }
      return createTimeBuilder_;
    }
    private com.google.protobuf.Timestamp updateTime_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        updateTimeBuilder_;

    /**
     * Operation last update time (finish time once the operation is done).
     * Proto: {@code .google.protobuf.Timestamp update_time = 2}.
     *
     * @return Whether the updateTime field is set.
     */
    public boolean hasUpdateTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     * Operation last update time. Proto: {@code .google.protobuf.Timestamp update_time = 2}.
     *
     * @return The updateTime, or the Timestamp default instance if unset.
     */
    public com.google.protobuf.Timestamp getUpdateTime() {
      if (updateTimeBuilder_ == null) {
        return updateTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : updateTime_;
      } else {
        // Once a nested builder exists, it owns the field's current value.
        return updateTimeBuilder_.getMessage();
      }
    }

    /** Sets update_time to the given (non-null) value and marks it present. */
    public Builder setUpdateTime(com.google.protobuf.Timestamp value) {
      if (updateTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateTime_ = value;
      } else {
        updateTimeBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /** Sets update_time from a Timestamp builder (built immediately). */
    public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
      if (updateTimeBuilder_ == null) {
        updateTime_ = builderForValue.build();
      } else {
        updateTimeBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     * Merges {@code value} into update_time: if a non-default value is already present
     * the two are field-merged, otherwise {@code value} replaces the field.
     */
    public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) {
      if (updateTimeBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateTime_ != null
            && updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
          getUpdateTimeBuilder().mergeFrom(value);
        } else {
          updateTime_ = value;
        }
      } else {
        updateTimeBuilder_.mergeFrom(value);
      }
      if (updateTime_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /** Clears update_time, disposing any nested builder. */
    public Builder clearUpdateTime() {
      bitField0_ = (bitField0_ & ~0x00000002);
      updateTime_ = null;
      if (updateTimeBuilder_ != null) {
        updateTimeBuilder_.dispose();
        updateTimeBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /** Returns a mutable builder for update_time, marking the field present. */
    public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateTimeFieldBuilder().getBuilder();
    }

    /** Read-only view of update_time without forcing builder creation. */
    public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
      if (updateTimeBuilder_ != null) {
        return updateTimeBuilder_.getMessageOrBuilder();
      } else {
        return updateTime_ == null
            ? com.google.protobuf.Timestamp.getDefaultInstance()
            : updateTime_;
      }
    }

    /**
     * Lazily creates the nested SingleFieldBuilderV3 for update_time; after creation the
     * raw field reference is nulled because the builder becomes the source of truth.
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp,
            com.google.protobuf.Timestamp.Builder,
            com.google.protobuf.TimestampOrBuilder>
        getUpdateTimeFieldBuilder() {
      if (updateTimeBuilder_ == null) {
        updateTimeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Timestamp,
                com.google.protobuf.Timestamp.Builder,
                com.google.protobuf.TimestampOrBuilder>(
                getUpdateTime(), getParentForChildren(), isClean());
        updateTime_ = null;
      }
      return updateTimeBuilder_;
    }
    private long successCount_;

    /**
     * Count of entries that were deleted successfully.
     * Proto: {@code int64 success_count = 3}.
     *
     * @return The successCount.
     */
    @java.lang.Override
    public long getSuccessCount() {
      return successCount_;
    }

    /**
     * Sets success_count and marks it present.
     *
     * @param value The successCount to set.
     * @return This builder for chaining.
     */
    public Builder setSuccessCount(long value) {
      successCount_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     * Clears success_count back to its proto3 default (0).
     *
     * @return This builder for chaining.
     */
    public Builder clearSuccessCount() {
      bitField0_ = (bitField0_ & ~0x00000004);
      successCount_ = 0L;
      onChanged();
      return this;
    }

    private long failureCount_;

    /**
     * Count of entries that encountered errors while processing.
     * Proto: {@code int64 failure_count = 4}.
     *
     * @return The failureCount.
     */
    @java.lang.Override
    public long getFailureCount() {
      return failureCount_;
    }

    /**
     * Sets failure_count and marks it present.
     *
     * @param value The failureCount to set.
     * @return This builder for chaining.
     */
    public Builder setFailureCount(long value) {
      failureCount_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    /**
     * Clears failure_count back to its proto3 default (0).
     *
     * @return This builder for chaining.
     */
    public Builder clearFailureCount() {
      bitField0_ = (bitField0_ & ~0x00000008);
      failureCount_ = 0L;
      onChanged();
      return this;
    }
    /** Replaces the unknown-field set verbatim (generated passthrough to the base builder). */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    /** Merges additional unknown fields into the existing set. */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.PurgeProductsMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.PurgeProductsMetadata)
  // Shared immutable default instance: the canonical "all fields unset" message.
  private static final com.google.cloud.retail.v2.PurgeProductsMetadata DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2.PurgeProductsMetadata();
  }

  /** Returns the shared immutable default instance. */
  public static com.google.cloud.retail.v2.PurgeProductsMetadata getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser: parses via a fresh Builder and attaches the partially-built
  // message to any InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<PurgeProductsMetadata> PARSER =
      new com.google.protobuf.AbstractParser<PurgeProductsMetadata>() {
        @java.lang.Override
        public PurgeProductsMetadata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser's contract (IPBE only) holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns this type's shared parser (static accessor). */
  public static com.google.protobuf.Parser<PurgeProductsMetadata> parser() {
    return PARSER;
  }

  /** Returns this type's shared parser (instance accessor). */
  @java.lang.Override
  public com.google.protobuf.Parser<PurgeProductsMetadata> getParserForType() {
    return PARSER;
  }

  /** Returns the default instance (instance accessor). */
  @java.lang.Override
  public com.google.cloud.retail.v2.PurgeProductsMetadata getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,186 | java-eventarc/proto-google-cloud-eventarc-v1/src/main/java/com/google/cloud/eventarc/v1/ListChannelsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/eventarc/v1/eventarc.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.eventarc.v1;
/**
*
*
* <pre>
* The request message for the ListChannels method.
* </pre>
*
* Protobuf type {@code google.cloud.eventarc.v1.ListChannelsRequest}
*/
public final class ListChannelsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.eventarc.v1.ListChannelsRequest)
ListChannelsRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListChannelsRequest.newBuilder() to construct.
  private ListChannelsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  /** No-arg constructor used for the default instance; string fields default to "". */
  private ListChannelsRequest() {
    parent_ = "";
    pageToken_ = "";
    orderBy_ = "";
  }

  /** Reflection hook used by the protobuf runtime to create instances without parsing. */
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListChannelsRequest();
  }

  /** Descriptor for {@code google.cloud.eventarc.v1.ListChannelsRequest}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.eventarc.v1.EventarcProto
        .internal_static_google_cloud_eventarc_v1_ListChannelsRequest_descriptor;
  }

  /** Binds the descriptor's fields to this class's accessors via reflection. */
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.eventarc.v1.EventarcProto
        .internal_static_google_cloud_eventarc_v1_ListChannelsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.eventarc.v1.ListChannelsRequest.class,
            com.google.cloud.eventarc.v1.ListChannelsRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily to String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   * Required. The parent collection to list channels on.
   * Proto: {@code string parent = 1 [(.google.api.field_behavior) = REQUIRED]}.
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 ByteString once and cache the String in its place.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of the parent field; caches the ByteString form once computed.
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   * The maximum number of channels to return on each page (service may send fewer).
   * Proto: {@code int32 page_size = 2}.
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   * The page token from a previous {@code ListChannels} call's {@code next_page_token};
   * all other parameters must match that call. Proto: {@code string page_token = 3}.
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of page_token; caches the ByteString form once computed.
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int ORDER_BY_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  private volatile java.lang.Object orderBy_ = "";

  /**
   * Comma-separated sort order, ascending by default; append {@code desc} for
   * descending (e.g. {@code name desc, channel_id}). Proto: {@code string order_by = 4}.
   *
   * @return The orderBy.
   */
  @java.lang.Override
  public java.lang.String getOrderBy() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      orderBy_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes of order_by; caches the ByteString form once computed.
   *
   * @return The bytes for orderBy.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getOrderByBytes() {
    java.lang.Object ref = orderBy_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      orderBy_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  /** Always ends up true: this message type has no required fields. */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  /** Serializes non-default fields in field-number order, then any unknown fields. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, orderBy_);
    }
    getUnknownFields().writeTo(output);
  }

  /** Computes and memoizes the serialized byte size; must mirror writeTo exactly. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(orderBy_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, orderBy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Value equality over all declared fields plus unknown fields. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.eventarc.v1.ListChannelsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.eventarc.v1.ListChannelsRequest other =
        (com.google.cloud.eventarc.v1.ListChannelsRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getOrderBy().equals(other.getOrderBy())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  /** Memoized hash over the descriptor, each field (tagged by field number), and unknowns. */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + ORDER_BY_FIELD_NUMBER;
    hash = (53 * hash) + getOrderBy().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite. All delegate to PARSER / the runtime helpers.
  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants: read a varint length prefix before the message bytes.
  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.eventarc.v1.ListChannelsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Fresh builder for this instance's type. */
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Fresh builder with all fields at their defaults. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.eventarc.v1.ListChannelsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  /** Builder seeded from this message (empty when this is the default instance). */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  /** Runtime hook: builder attached to a parent for nested-builder change propagation. */
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The request message for the ListChannels method.
* </pre>
*
* Protobuf type {@code google.cloud.eventarc.v1.ListChannelsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.eventarc.v1.ListChannelsRequest)
com.google.cloud.eventarc.v1.ListChannelsRequestOrBuilder {
    /** Descriptor for {@code google.cloud.eventarc.v1.ListChannelsRequest}. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_ListChannelsRequest_descriptor;
    }

    /** Binds the descriptor's fields to this builder's accessors via reflection. */
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_ListChannelsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.eventarc.v1.ListChannelsRequest.class,
              com.google.cloud.eventarc.v1.ListChannelsRequest.Builder.class);
    }

    // Construct using com.google.cloud.eventarc.v1.ListChannelsRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets every field to its default and clears all presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      orderBy_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.eventarc.v1.EventarcProto
          .internal_static_google_cloud_eventarc_v1_ListChannelsRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.eventarc.v1.ListChannelsRequest getDefaultInstanceForType() {
      return com.google.cloud.eventarc.v1.ListChannelsRequest.getDefaultInstance();
    }

    /** Builds the message, failing if it is uninitialized (never, for this type). */
    @java.lang.Override
    public com.google.cloud.eventarc.v1.ListChannelsRequest build() {
      com.google.cloud.eventarc.v1.ListChannelsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Builds without the initialization check; skips field copying when nothing is set. */
    @java.lang.Override
    public com.google.cloud.eventarc.v1.ListChannelsRequest buildPartial() {
      com.google.cloud.eventarc.v1.ListChannelsRequest result =
          new com.google.cloud.eventarc.v1.ListChannelsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    /** Copies each field whose presence bit is set into the new message instance. */
    private void buildPartial0(com.google.cloud.eventarc.v1.ListChannelsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.orderBy_ = orderBy_;
      }
    }

    // Generated passthroughs to the base builder for reflective field access.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    /** Dispatches a generic Message merge to the type-specific overload when possible. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.eventarc.v1.ListChannelsRequest) {
        return mergeFrom((com.google.cloud.eventarc.v1.ListChannelsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /**
     * Field-by-field merge: proto3 semantics, only non-default (non-empty / non-zero)
     * fields from {@code other} overwrite this builder's values.
     */
    public Builder mergeFrom(com.google.cloud.eventarc.v1.ListChannelsRequest other) {
      if (other == com.google.cloud.eventarc.v1.ListChannelsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getOrderBy().isEmpty()) {
        orderBy_ = other.orderBy_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    /** Always true: this message type has no required fields. */
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Wire-format parse loop: reads tags until EOF (tag 0) or an end-group tag, routing
     * tags 10/26/34 (strings) and 16 (int32) to their fields, keeping unknowns.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                orderBy_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on partial reads, so memoized state is invalidated.
        onChanged();
      } // finally
      return this;
    }
      // Presence bits: 0x1 parent, 0x2 page_size, 0x4 page_token, 0x8 order_by.
      private int bitField0_;

      // Holds either a String or a ByteString; decoded lazily to String on first read.
      private java.lang.Object parent_ = "";

      /**
       * Required. The parent collection to list channels on.
       * Proto: {@code string parent = 1 [(.google.api.field_behavior) = REQUIRED]}.
       *
       * @return The parent.
       */
      public java.lang.String getParent() {
        java.lang.Object ref = parent_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the UTF-8 ByteString once and cache the String in its place.
          com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          parent_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }

      /**
       * UTF-8 bytes of parent; caches the ByteString form once computed.
       *
       * @return The bytes for parent.
       */
      public com.google.protobuf.ByteString getParentBytes() {
        java.lang.Object ref = parent_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
          parent_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }

      /**
       * Sets parent and marks it present.
       *
       * @param value The parent to set.
       * @return This builder for chaining.
       */
      public Builder setParent(java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        parent_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      /**
       * Clears parent back to its default ("").
       *
       * @return This builder for chaining.
       */
      public Builder clearParent() {
        parent_ = getDefaultInstance().getParent();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }

      /**
       * Sets parent from raw bytes after verifying they are valid UTF-8.
       *
       * @param value The bytes for parent to set.
       * @return This builder for chaining.
       */
      public Builder setParentBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        checkByteStringIsUtf8(value);
        parent_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
    // Generated accessors for `page_size` (field 2). Do not hand-edit.
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of channels to return on each page.
     *
     * Note: The service may send fewer.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of channels to return on each page.
     *
     * Note: The service may send fewer.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of channels to return on each page.
     *
     * Note: The service may send fewer.
     * </pre>
     *
     * <code>int32 page_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    // Generated accessors for `page_token` (field 3); same lazy String/ByteString
    // caching pattern as `parent`. Do not hand-edit.
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * The page token; provide the value from the `next_page_token` field in a
     * previous `ListChannels` call to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListChannels` must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The page token; provide the value from the `next_page_token` field in a
     * previous `ListChannels` call to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListChannels` must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The page token; provide the value from the `next_page_token` field in a
     * previous `ListChannels` call to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListChannels` must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The page token; provide the value from the `next_page_token` field in a
     * previous `ListChannels` call to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListChannels` must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The page token; provide the value from the `next_page_token` field in a
     * previous `ListChannels` call to retrieve the subsequent page.
     *
     * When paginating, all other parameters provided to `ListChannels` must
     * match the call that provided the page token.
     * </pre>
     *
     * <code>string page_token = 3;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Generated accessors for `order_by` (field 4); same lazy String/ByteString
    // caching pattern as `parent`. Do not hand-edit.
    private java.lang.Object orderBy_ = "";
    /**
     *
     *
     * <pre>
     * The sorting order of the resources returned. Value should be a
     * comma-separated list of fields. The default sorting order is ascending. To
     * specify descending order for a field, append a `desc` suffix; for example:
     * `name desc, channel_id`.
     * </pre>
     *
     * <code>string order_by = 4;</code>
     *
     * @return The orderBy.
     */
    public java.lang.String getOrderBy() {
      java.lang.Object ref = orderBy_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        orderBy_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The sorting order of the resources returned. Value should be a
     * comma-separated list of fields. The default sorting order is ascending. To
     * specify descending order for a field, append a `desc` suffix; for example:
     * `name desc, channel_id`.
     * </pre>
     *
     * <code>string order_by = 4;</code>
     *
     * @return The bytes for orderBy.
     */
    public com.google.protobuf.ByteString getOrderByBytes() {
      java.lang.Object ref = orderBy_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        orderBy_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The sorting order of the resources returned. Value should be a
     * comma-separated list of fields. The default sorting order is ascending. To
     * specify descending order for a field, append a `desc` suffix; for example:
     * `name desc, channel_id`.
     * </pre>
     *
     * <code>string order_by = 4;</code>
     *
     * @param value The orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderBy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      orderBy_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The sorting order of the resources returned. Value should be a
     * comma-separated list of fields. The default sorting order is ascending. To
     * specify descending order for a field, append a `desc` suffix; for example:
     * `name desc, channel_id`.
     * </pre>
     *
     * <code>string order_by = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearOrderBy() {
      orderBy_ = getDefaultInstance().getOrderBy();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The sorting order of the resources returned. Value should be a
     * comma-separated list of fields. The default sorting order is ascending. To
     * specify descending order for a field, append a `desc` suffix; for example:
     * `name desc, channel_id`.
     * </pre>
     *
     * <code>string order_by = 4;</code>
     *
     * @param value The bytes for orderBy to set.
     * @return This builder for chaining.
     */
    public Builder setOrderByBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      orderBy_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Unknown fields are delegated to GeneratedMessage.Builder so they are
    // preserved across parse/serialize round-trips (generated code).
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.eventarc.v1.ListChannelsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.eventarc.v1.ListChannelsRequest)
  // Shared immutable default instance; unset message fields resolve to it
  // (generated code).
  private static final com.google.cloud.eventarc.v1.ListChannelsRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.eventarc.v1.ListChannelsRequest();
  }
  public static com.google.cloud.eventarc.v1.ListChannelsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom, attaching any partially
  // parsed message to the thrown exception so callers can inspect it
  // (generated code).
  private static final com.google.protobuf.Parser<ListChannelsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListChannelsRequest>() {
        @java.lang.Override
        public ListChannelsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // wrap plain I/O failures in the protobuf exception type expected
            // by callers of parsePartialFrom
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListChannelsRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListChannelsRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.eventarc.v1.ListChannelsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright (c) 2000, 2025, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
package com.oracle.coherence.common.internal.net.socketbus;
import com.oracle.coherence.common.base.Hasher;
import com.oracle.coherence.common.net.SelectionService;
import com.oracle.coherence.common.net.SocketProvider;
import com.oracle.coherence.common.net.SelectionServices;
import com.oracle.coherence.common.net.InetSocketAddressHasher;
import com.oracle.coherence.common.net.exabus.Bus;
import com.oracle.coherence.common.net.exabus.Depot;
import com.oracle.coherence.common.net.exabus.EndPoint;
import com.oracle.coherence.common.net.exabus.util.UrlEndPoint;
import com.oracle.coherence.common.net.exabus.spi.Driver;
import com.oracle.coherence.common.io.BufferManager;
import com.oracle.coherence.common.io.BufferManagers;
import com.oracle.coherence.common.util.Duration;
import com.oracle.coherence.common.util.MemorySize;
import com.tangosol.coherence.config.Config;
import java.io.IOException;
import java.net.SocketAddress;
import java.net.ServerSocket;
import java.net.SocketOptions;
import java.net.SocketException;
import java.util.logging.Logger;
/**
* SocketDriver is a base implementation for socket based busses.
*
* @author mf 2010.12.27
*/
public class SocketBusDriver
implements Driver
{
// ----- constructors ---------------------------------------------------
    /**
     * Construct a SocketDriver.
     *
     * @param deps  the driver's dependencies
     */
    public SocketBusDriver(Dependencies deps)
        {
        // copy defensively so later mutation of the caller's deps has no
        // effect, and validate eagerly so misconfiguration fails fast
        m_dependencies = copyDependencies(deps).validate();
        }
// ----- Driver interface -----------------------------------------------
    /**
     * {@inheritDoc}
     */
    public void setDepot(Depot depot)
        {
        // simply retained; this driver does not interpret the Depot itself
        m_depot = depot;
        }
    /**
     * {@inheritDoc}
     */
    public Depot getDepot()
        {
        return m_depot;
        }
    /**
     * {@inheritDoc}
     */
    public EndPoint resolveEndPoint(String sName)
        {
        if (sName == null)
            {
            return null;
            }
        Dependencies deps = getDependencies();
        String sMsg = deps.getMessageBusProtocol();
        String sMem = deps.getMemoryBusProtocol();
        // cheap prefix screen before attempting a full parse of the name
        if (sName.startsWith(sMsg) || sName.startsWith(sMem))
            {
            try
                {
                UrlEndPoint point = resolveSocketEndPoint(sName);
                String sProtocol = point.getProtocol();
                // the prefix screen can match a longer, unrelated protocol
                // name, so confirm the parsed protocol matches exactly
                if (sProtocol.equals(sMsg) || sProtocol.equals(sMem))
                    {
                    return point;
                    }
                }
            catch (IllegalArgumentException e)
                {
                // only propagate the parse failure if the name explicitly
                // targeted one of this driver's protocols (prefix + delimiter);
                // otherwise fall through and return null so other drivers may
                // attempt to resolve the name
                if (sName.startsWith(sMsg + UrlEndPoint.PROTOCOL_DELIMITER) ||
                    sName.startsWith(sMem + UrlEndPoint.PROTOCOL_DELIMITER))
                    {
                    throw e;
                    }
                }
            }
        // not an endpoint this driver understands
        return null;
        }
/**
* {@inheritDoc}
*/
public boolean isSupported(EndPoint point)
{
if (point == null)
{
return true;
}
else if (point instanceof UrlEndPoint)
{
Dependencies deps = getDependencies();
String sProtocol = ((UrlEndPoint) point).getProtocol();
return sProtocol.equals(deps.getMessageBusProtocol()) ||
sProtocol.equals(deps.getMemoryBusProtocol());
}
else
{
return false;
}
}
    /**
     * {@inheritDoc}
     */
    public Bus createBus(EndPoint pointLocal)
        {
        if (isSupported(pointLocal))
            {
            try
                {
                Dependencies deps = getDependencies();
                UrlEndPoint pointSocket = (UrlEndPoint) pointLocal;
                String sProtocol = pointSocket.getProtocol();
                if (sProtocol.equals(deps.getMessageBusProtocol()))
                    {
                    return new SocketMessageBus(this, pointSocket);
                    }
                // else; fall through
                // NOTE(review): the memory bus protocol passes isSupported()
                // but has no construction path here, so it falls through to
                // the IllegalArgumentException below
                }
            catch (IOException e)
                {
                throw new RuntimeException("Error creating SocketBus " +
                    "instance for " + pointLocal, e);
                }
            }
        throw new IllegalArgumentException("unsupported EndPoint " + pointLocal);
        }
// ----- helpers -------------------------------------------------------
/**
* Resolve the supplied canonical name into a SocketEndPoint.
*
* @param sName the endpoint name
*
* @return the resolved EndPoint
*
* @throws IllegalArgumentException if the name is not a valid SocketEndPoint
*/
public UrlEndPoint resolveSocketEndPoint(String sName)
{
Dependencies deps = getDependencies();
return new UrlEndPoint(sName, deps.getSocketProvider(),
deps.getSocketAddressHasher());
}
/**
* Resolve the EndPoint which the specified service socket is bound to.
*
* @param pointLocal the requested bind point
* @param socket the bound socket
*
* @return the EndPoint
*/
public UrlEndPoint resolveBindPoint(UrlEndPoint pointLocal, ServerSocket socket)
{
Dependencies deps = getDependencies();
SocketProvider provider = deps.getSocketProvider();
String sQuery = pointLocal.getQueryString();
return new UrlEndPoint(pointLocal.getProtocol() + UrlEndPoint.PROTOCOL_DELIMITER +
provider.getAddressString(socket) + (sQuery == null ? "" : "?" + sQuery), provider,
deps.getSocketAddressHasher());
}
    /**
     * Return the driver's Dependencies.
     *
     * @return the driver's Dependencies
     */
    public Dependencies getDependencies()
        {
        return m_dependencies;
        }
    /**
     * Produce a shallow copy of the supplied dependencies.
     *
     * @param deps  the dependencies to copy
     *
     * @return the dependencies
     */
    protected DefaultDependencies copyDependencies(Dependencies deps)
        {
        // overridable so subclasses can substitute an extended copy
        return new DefaultDependencies(deps);
        }
// ----- inner interface: Dependencies ----------------------------------
/**
* Dependencies provides an interface by which the SocketBusDriver can
* be provided with its external dependencies.
*/
public interface Dependencies
{
/**
* Return the MessageBus protocol prefix.
*
* @return the MessageBus protocol prefix
*/
public String getMessageBusProtocol();
/**
* Return the MemoryBus protocol prefix.
*
* @return the MemoryBus protocol prefix
*/
public String getMemoryBusProtocol();
/**
* Return the SelectionService used to run this driver.
*
* @return the SelectionService.
*/
public SelectionService getSelectionService();
/**
* Return the SocketProvider to use in producing sockets for this
* driver.
*
* @return the SocketProvider
*/
public SocketProvider getSocketProvider();
/**
* Return the SocketAddress Hasher to use in comparing SocketAddresses.
*
* @return the SocketAddress Hasher
*/
public Hasher<? super SocketAddress> getSocketAddressHasher();
/**
* Return the SocketOptions to utilize in this driver.
*
* @return the SocketOptions
*/
public SocketOptions getSocketOptions();
/**
* Return the BufferManager to use in creating temporary buffers.
*
* @return the BufferManager
*/
public BufferManager getBufferManager();
/**
* Return the Logger to use.
*
* @return the logger
*/
public Logger getLogger();
/**
* Max time after which the receipt acks will be sent to the peer.
*
* @return receipt ack delay in millis
*/
public long getMaximumReceiptDelayMillis();
/**
* Return the number of milliseconds after which a connection is considered to be bad due to a missing
* acknowledgement and the connection should be reestablished.
*
* @return the timeout in milliseconds, or 0 for indefinite
*/
public long getAckTimeoutMillis();
/**
* Return the default value of {@link #getAckTimeoutMillis()}.
*
* @return the default timeout in milliseconds
*/
default public long getDefaultAckTimeoutMillis()
{
return getAckTimeoutMillis();
};
/**
* Return the number of milliseconds after which a connection is considered to be unrecoverable due to a missing
* acknowledgement and the connection an unsolicited DISCONNECT event should be emitted
*
* @return the timeout in milliseconds, or 0 for indefinite
*/
public long getAckFatalTimeoutMillis();
/**
* Return the interval between reconnect attempts
*
* @return socket reconnect interval in millis
*/
public long getSocketReconnectDelayMillis();
/**
* Return the maximum number sequential reconnect attempts to make
*
* @return the reconnect limit
*/
public int getSocketReconnectLimit();
/**
* Return the maximum time a connection may be left idle before
* the bus automatically injects traffic (hidden to the application)
* in order to ensure that the underlying network infrastructure
* does not disconnect the socket due to an idle timeout.
*
* Note: This is not SO_KEEPALIVE, this is a higher level construct
* which ensures the connection is used often enough to not appear
* idle. While SO_KEEPALIVE operates similarly there are a few
* key differences. First the interval is not controllable by the
* user. Second when SO_KEEPALIVE actually encourages termination
* of connections due to temporary network outages, that is it also
* serves as a time based disconnect detection. SO_KEEPALIVE is
* most often used to try to keep an idel connection from being
* terminated by the network infrastructure. This is a misguided use
* of the feature and often doesn't work as intended, hense this
* higher level version.
*
* @return the idle timeout in milliseconds
*/
public long getHeartbeatMillis();
/**
* Threshold at which a send/signal operation should perform
* an auto-flush of the unflushed write batch.
*
* @return the number of queue'd bytes at which to auto-flush
*/
public long getAutoFlushThreshold();
/**
* Threshold at which to request an immediate receipt from the peer.
*
* @return the number of bytes to send before requesting immediate receipts
*/
public long getReceiptRequestThreshold();
/**
* Return the maximum number of threads which should concurrently attempt direct writes.
* <p>
* A direct write is a write performed on the calling thread rather then on a background thread. Direct writes
* tend to reduce latency when contention is low. When contention is high though better throughput and latency
* may be achieved by allowing writes to be offloaded to background threads.
* </p>
*
* @return the thread count
*/
public int getDirectWriteThreadThreshold();
/**
* For the purposes of testing, this method specifies a percentage of read operations which
* should result in an underlying connection failure. Specifically the socket's input and output
* streams will be shutdown.
*
* @return a drop ratio
*/
public int getDropRatio();
/**
* For the purposes of testing, this method specifies a percentage of read operations which
* should result in a bit flip of the stream. This is to simulate data corruption from wire, should
* result in connection migration.
*
* @return a corruption ratio
*/
public int getCorruptionRatio();
/**
* Return true if CRC validation is enabled.
*
* @return true if CRC validation is enabled
*/
public boolean isCrcEnabled();
}
// ----- inner class: DefaultDependencies -------------------------------
/**
* SimpleDependencies provides a basic Dependencies implementation as well
* as default values where applicable.
*/
public static class DefaultDependencies
implements Dependencies
{
        /**
         * Construct a DefaultDependencies object.
         *
         * All settings retain their documented defaults.
         */
        public DefaultDependencies()
            {
            }
        /**
         * Construct a DefaultDependencies object copying the values from the
         * specified dependencies object.
         *
         * @param deps  the dependencies to copy, or null
         */
        public DefaultDependencies(Dependencies deps)
            {
            // a null deps leaves every setting at its default
            if (deps != null)
                {
                m_provider = deps.getSocketProvider();
                m_hasher = deps.getSocketAddressHasher();
                m_service = deps.getSelectionService();
                m_sProtocolMessageBus = deps.getMessageBusProtocol();
                m_sProtocolMemoryBus = deps.getMemoryBusProtocol();
                m_options = deps.getSocketOptions();
                m_bufferManager = deps.getBufferManager();
                m_logger = deps.getLogger();
                m_cMaxReceiptDelayMillis = deps.getMaximumReceiptDelayMillis();
                m_cReconnectLimit = deps.getSocketReconnectLimit();
                m_cReconnectDelayMillis = deps.getSocketReconnectDelayMillis();
                m_cHeartbeatDelayMillis = deps.getHeartbeatMillis();
                m_cAckTimeoutMillis = deps.getAckTimeoutMillis();
                m_cDefaultAckTimeoutMillis = deps.getDefaultAckTimeoutMillis();
                m_cAckFatalTimeoutMillis = deps.getAckFatalTimeoutMillis();
                m_cbAutoFlush = deps.getAutoFlushThreshold();
                m_cbReceiptRequest = deps.getReceiptRequestThreshold();
                m_cThreadsDirect = deps.getDirectWriteThreadThreshold();
                m_nDropRatio = deps.getDropRatio();
                m_nCorruptionRatio = deps.getCorruptionRatio();
                m_fCrc = deps.isCrcEnabled();
                }
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public String getMessageBusProtocol()
            {
            return m_sProtocolMessageBus;
            }
        /**
         * Specify the message bus protocol name.
         *
         * @param sProtocol  the message bus protocol name
         *
         * @return this object
         */
        public DefaultDependencies setMessageBusProtocol(String sProtocol)
            {
            m_sProtocolMessageBus = sProtocol;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public String getMemoryBusProtocol()
            {
            return m_sProtocolMemoryBus;
            }
        /**
         * Specify the memory bus protocol name.
         *
         * @param sProtocol  the memory bus protocol name
         *
         * @return this object
         */
        public DefaultDependencies setMemoryBusProtocol(String sProtocol)
            {
            m_sProtocolMemoryBus = sProtocol;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public SelectionService getSelectionService()
            {
            // read into a local so the null check and the return use the same value
            SelectionService svc = m_service;
            return svc == null ? SelectionServices.getDefaultService() : svc;
            }
        /**
         * Specify the SelectionService to be used by this driver.
         *
         * @param service  the SelectionService
         *
         * @return this object
         */
        public DefaultDependencies setSelectionService(SelectionService service)
            {
            m_service = service;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public SocketProvider getSocketProvider()
            {
            // no default; validate() rejects a null provider
            return m_provider;
            }
        /**
         * Specify the SocketProvider to use.
         *
         * @param provider  the SocketProvider to use
         *
         * @return this object
         */
        public DefaultDependencies setSocketProvider(SocketProvider provider)
            {
            m_provider = provider;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public Hasher<? super SocketAddress> getSocketAddressHasher()
            {
            Hasher<? super SocketAddress> hasher = m_hasher;
            if (hasher == null)
                {
                // default to InetSocketAddress-based hashing/comparison
                return InetSocketAddressHasher.INSTANCE;
                }
            return hasher;
            }
        /**
         * Specify the SocketAddress Hasher to be used in comparing addresses.
         *
         * NOTE(review): the method name contains a typo ("Hahser"); it is
         * retained as-is because renaming would break existing callers.
         *
         * @param hasher  the hasher
         *
         * @return this object
         */
        public DefaultDependencies setSocketAddressHahser(Hasher<? super SocketAddress> hasher)
            {
            m_hasher = hasher;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public SocketOptions getSocketOptions()
            {
            SocketOptions options = m_options;
            // fall back to the driver-wide defaults when none were configured
            return options == null ? DEFAULT_OPTIONS : options;
            }
        /**
         * Specify the SocketOptions to use.
         *
         * @param options  the options
         *
         * @return this object
         */
        public DefaultDependencies setSocketOptions(SocketOptions options)
            {
            m_options = options;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public BufferManager getBufferManager()
            {
            BufferManager manager = m_bufferManager;
            // fall back to the shared default manager when none was configured
            return manager == null ? DEFAULT_BUFFER_MANAGER : manager;
            }
        /**
         * Specify the BufferManager to be used by this driver.
         *
         * @param manager  the buffer manager
         *
         * @return this object
         */
        public DefaultDependencies setBufferManager(BufferManager manager)
            {
            m_bufferManager = manager;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public Logger getLogger()
            {
            Logger logger = m_logger;
            // fall back to the class-level default logger when none was configured
            return logger == null ? LOGGER : logger;
            }
        /**
         * Specify the Logger to use.
         *
         * @param logger  the logger
         *
         * @return this object
         */
        public DefaultDependencies setLogger(Logger logger)
            {
            m_logger = logger;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public long getMaximumReceiptDelayMillis()
            {
            return m_cMaxReceiptDelayMillis;
            }
        /**
         * Set the maximum receipt ack delay.
         *
         * @param cDelayMillis  max receipt ack delay in millis
         *
         * @return this object
         */
        public DefaultDependencies setMaximumReceiptDelayMillis(long cDelayMillis)
            {
            m_cMaxReceiptDelayMillis = cDelayMillis;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public long getAckTimeoutMillis()
            {
            // the effective value is computed in validate() from configuration
            return m_cAckTimeoutMillis;
            }
        /**
         * Set the ack timeout.
         *
         * @param cAckTimeoutMillis  ack timeout, or 0 for indefinite
         *
         * @return this object
         */
        public DefaultDependencies setAckTimeoutMillis(long cAckTimeoutMillis)
            {
            m_cAckTimeoutMillis = cAckTimeoutMillis;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public long getAckFatalTimeoutMillis()
            {
            return m_cAckFatalTimeoutMillis;
            }
        /**
         * Set the fatal ack timeout.
         *
         * @param cAckFatalTimeoutMillis  ack timeout, or 0 for indefinite
         *
         * @return this object
         */
        public DefaultDependencies setAckFatalTimeoutMillis(long cAckFatalTimeoutMillis)
            {
            m_cAckFatalTimeoutMillis = cAckFatalTimeoutMillis;
            return this;
            }
        /**
         * Return the default value of the ack timeout.
         *
         * This value seeds the configuration lookup performed in validate().
         *
         * @return default value of ack timeout
         */
        public long getDefaultAckTimeoutMillis()
            {
            return m_cDefaultAckTimeoutMillis;
            }
        /**
         * Set the default ack timeout.
         *
         * @param cAckTimeoutMillis  default ack timeout
         *
         * @return this object
         */
        public DefaultDependencies setDefaultAckTimeoutMillis(long cAckTimeoutMillis)
            {
            m_cDefaultAckTimeoutMillis = cAckTimeoutMillis;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public long getSocketReconnectDelayMillis()
            {
            return m_cReconnectDelayMillis;
            }
        /**
         * Set the reconnect interval for the Connection.
         *
         * @param cDelayMillis  reconnect delay in millis
         *
         * @return this object
         */
        public DefaultDependencies setSocketReconnectDelayMillis(long cDelayMillis)
            {
            m_cReconnectDelayMillis = cDelayMillis;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public int getSocketReconnectLimit()
            {
            return m_cReconnectLimit;
            }
        /**
         * Set the reconnect limit.
         *
         * @param cReconnectLimit  the reconnect limit
         *
         * @return this object
         */
        public DefaultDependencies setSocketReconnectLimit(int cReconnectLimit)
            {
            m_cReconnectLimit = cReconnectLimit;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public long getHeartbeatMillis()
            {
            return m_cHeartbeatDelayMillis;
            }
        /**
         * Set the heartbeat interval for the connection.
         *
         * @param cMillis  the heartbeat interval
         *
         * @return this object
         */
        public DefaultDependencies setHeartbeatMillis(long cMillis)
            {
            m_cHeartbeatDelayMillis = cMillis;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public long getAutoFlushThreshold()
            {
            return m_cbAutoFlush;
            }
        /**
         * Set the threshold for auto flush.
         *
         * @param cbThreshold  auto flush threshold in bytes
         *
         * @return this object
         */
        public DefaultDependencies setAutoFlushThreshold(long cbThreshold)
            {
            m_cbAutoFlush = cbThreshold;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public long getReceiptRequestThreshold()
            {
            return m_cbReceiptRequest;
            }
        /**
         * Set the threshold for receipt requests.
         *
         * @param cbRequest  receipt request threshold in bytes
         *
         * @return this object
         */
        public DefaultDependencies setReceiptRequestThreshold(long cbRequest)
            {
            m_cbReceiptRequest = cbRequest;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public int getDirectWriteThreadThreshold()
            {
            return m_cThreadsDirect;
            }
        /**
         * Specify the direct write threshold.
         *
         * @param cThreads  the number of threads
         *
         * @return this object
         */
        public DefaultDependencies setDirectWriteThreadThreshold(int cThreads)
            {
            m_cThreadsDirect = cThreads;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public int getDropRatio()
            {
            return m_nDropRatio;
            }
        /**
         * Specify the drop ratio (for testing).
         *
         * @param nDropRatio  the ratio to drop, i.e. 1:nDropRatio
         *
         * @return this object
         */
        public DefaultDependencies setDropRatio(int nDropRatio)
            {
            m_nDropRatio = nDropRatio;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public int getCorruptionRatio()
            {
            return m_nCorruptionRatio;
            }
        /**
         * Specify the corruption ratio (for testing).
         *
         * @param nCorruptionRatio  the ratio to corrupt, i.e. 1:nCorruptionRatio
         *
         * @return this object
         */
        public DefaultDependencies setCorruptionRatio(int nCorruptionRatio)
            {
            m_nCorruptionRatio = nCorruptionRatio;
            return this;
            }
        /**
         * {@inheritDoc}
         */
        @Override
        public boolean isCrcEnabled()
            {
            return m_fCrc;
            }
        /**
         * Specify whether CRC validation should be enabled.
         *
         * NOTE(review): conventionally this mutator would be named setCrcEnabled;
         * the existing name is retained for compatibility with existing callers.
         *
         * @param fCrc  true if CRC is enabled
         *
         * @return this object
         */
        public DefaultDependencies isCrcEnabled(boolean fCrc)
            {
            m_fCrc = fCrc;
            return this;
            }
// ----- helpers ------------------------------------------------
/**
* Validate the supplied dependencies.
*
* @throws IllegalArgumentException if the dependencies are not valid
*
* @return this object
*/
protected DefaultDependencies validate()
{
ensureArgument(getMemoryBusProtocol(), "MemoryBusProtocol");
ensureArgument(getMessageBusProtocol(), "MessageBusProtocol");
ensureArgument(getSelectionService(), "SelectionService");
ensureArgument(getSocketAddressHasher(), "SocketAddressHasher");
ensureArgument(getSocketProvider(), "SocketProvider");
if (getMemoryBusProtocol().equals(getMessageBusProtocol()))
{
throw new IllegalArgumentException(
"memory and mess bus protocols cannot use the sane names");
}
m_cAckTimeoutMillis = Config.getDuration(SocketBusDriver.class.getName() + ".ackTimeoutMillis",
new Duration(getDefaultAckTimeoutMillis(), Duration.Magnitude.MILLI),
Duration.Magnitude.MILLI).as(Duration.Magnitude.MILLI);
return this;
}
/**
* Ensure that the specified object is non-null
*
* @param o the object to ensure
* @param sName the name of the corresponding parameter
*
* @throws IllegalArgumentException if o is null
*/
protected static void ensureArgument(Object o, String sName)
{
if (o == null)
{
throw new IllegalArgumentException(sName + " cannot be null");
}
}
// ----- data members -------------------------------------------
/**
 * The SocketProvider to use when producing sockets.
 */
protected SocketProvider m_provider;
/**
 * The SocketAddress hasher.
 */
protected Hasher<? super SocketAddress> m_hasher;
/**
 * The SelectionService the busses will use for IO processing.
 */
protected SelectionService m_service;
/**
 * The message bus protocol prefix.
 */
protected String m_sProtocolMessageBus;
/**
 * The memory bus protocol prefix.
 */
protected String m_sProtocolMemoryBus;
/**
 * The SocketOptions.
 */
protected SocketOptions m_options;
/**
 * The BufferManager.
 */
protected BufferManager m_bufferManager;
/**
 * The Logger.
 */
protected Logger m_logger;
/**
 * Reconnect interval for the Connection in millis.
 * Configurable via the ".reconnectDelayMillis" property; defaults to 200ms.
 */
protected long m_cReconnectDelayMillis =
    Config.getDuration(SocketBusDriver.class.getName() + ".reconnectDelayMillis",
        new Duration(200, Duration.Magnitude.MILLI),
        Duration.Magnitude.MILLI).as(Duration.Magnitude.MILLI);
/**
 * The maximum number of sequential reconnects to attempt.
 * <p>
 * A value of -1 indicates that no reconnects should be attempted.
 */
protected int m_cReconnectLimit = Config.getInteger(SocketBusDriver.class.getName()+".reconnectLimit", 3);
/**
 * Maximum receipt ack delay in millis; defaults to 500ms.
 */
protected long m_cMaxReceiptDelayMillis = Config.getDuration(SocketBusDriver.class.getName()+".maxReceiptDelayMillis",
    new Duration(500, Duration.Magnitude.MILLI),
    Duration.Magnitude.MILLI).as(Duration.Magnitude.MILLI);
/**
 * Ack timeout in millis.  Unlike the other configured fields this one is
 * resolved in {@link #validate()} because its default comes from
 * getDefaultAckTimeoutMillis().
 */
protected long m_cAckTimeoutMillis;
/**
 * Default ack timeout in millis.
 */
protected long m_cDefaultAckTimeoutMillis = 10_000L;
/**
 * Fatal ack timeout in millis; defaults to 10 minutes.
 */
protected long m_cAckFatalTimeoutMillis = Config.getDuration(SocketBusDriver.class.getName()+".fatalTimeoutMillis",
    new Duration(10, Duration.Magnitude.MINUTE),
    Duration.Magnitude.MILLI).as(Duration.Magnitude.MILLI);
/**
 * Heartbeat interval in millis, disabled by default (0s) now that we support reconnects.
 */
protected long m_cHeartbeatDelayMillis = Config.getDuration(SocketBusDriver.class.getName()+".heartbeatInterval",
    new Duration("0s")).as(Duration.Magnitude.MILLI);
/**
 * Auto flush threshold in bytes; -1 when not configured (see getSafeMemorySize).
 */
protected long m_cbAutoFlush = getSafeMemorySize(SocketBusDriver.class.getName()+".autoFlushThreshold");
/**
 * Threshold in bytes after which to request receipts; -1 when not configured.
 */
protected long m_cbReceiptRequest = getSafeMemorySize(SocketBusDriver.class.getName()+".receiptRequestThreshold");
/**
 * The maximum number of concurrent writers on which to attempt direct writes.
 */
protected int m_cThreadsDirect = Config.getInteger(
    SocketBusDriver.class.getName() + ".directWriteThreadThreshold", 4);
/**
 * The drop ratio.
 * NOTE(review): presumably a fault-injection knob for testing (0 disables) — confirm.
 */
protected int m_nDropRatio = Config.getInteger(
    SocketBusDriver.class.getName() + ".dropRatio", 0);
/**
 * The force corruption ratio.
 * NOTE(review): presumably a fault-injection knob for testing (0 disables) — confirm.
 */
protected int m_nCorruptionRatio = Config.getInteger(
    SocketBusDriver.class.getName() + ".corruptionRatio", 0);
/**
 * True iff CRC validation is enabled; disabled by default.
 */
protected boolean m_fCrc = Config.getBoolean(SocketBusDriver.class.getName() + ".crc", false);
/**
 * Look up the named memory-size configuration property.
 *
 * @param sName  the configuration property name
 *
 * @return the configured size in bytes, or -1 if the property is not set
 */
private static long getSafeMemorySize(String sName)
    {
    MemorySize size = Config.getMemorySize(sName);
    if (size == null)
        {
        return -1;
        }
    return size.getByteCount();
    }
// ----- constants ----------------------------------------------
/**
 * Default BufferManager, chosen by the ".bufferManager" configuration
 * property: "heap", "direct", or "network" (the default).
 */
public static final BufferManager DEFAULT_BUFFER_MANAGER;
static
    {
    String sManager = Config.getProperty(SocketBusDriver.class.getName() + ".bufferManager", "network");
    if (sManager.equals("heap"))
        {
        DEFAULT_BUFFER_MANAGER = BufferManagers.getHeapManager();
        }
    else if (sManager.equals("direct"))
        {
        DEFAULT_BUFFER_MANAGER = BufferManagers.getDirectManager();
        }
    else if (sManager.equals("network"))
        {
        DEFAULT_BUFFER_MANAGER = BufferManagers.getNetworkDirectManager();
        }
    else
        {
        // unrecognized value: fail class initialization rather than guess
        throw new IllegalArgumentException("unknown BufferManager: " + sManager);
        }
    }
/**
 * Default SocketOptions.
 * <p>
 * Read-only: {@code setOption} always throws, and {@code getOption} answers
 * a fixed set of TCP options, returning null for anything else.
 */
public static final SocketOptions DEFAULT_OPTIONS = new SocketOptions()
    {
    @Override
    public void setOption(int optID, Object value)
        throws SocketException
        {
        // the default options are immutable
        throw new UnsupportedOperationException();
        }
    @Override
    public Object getOption(int optID)
        throws SocketException
        {
        switch (optID)
            {
            case TCP_NODELAY:
                return true;
            case SO_LINGER:
                return 0;
            case SO_RCVBUF:
                // -1 sentinel means "not configured"; report no value (null)
                return RX_BUFFER_SIZE == -1 ? null : RX_BUFFER_SIZE;
            case SO_SNDBUF:
                return TX_BUFFER_SIZE == -1 ? null : TX_BUFFER_SIZE;
            default:
                return null;
            }
        }
    // resolved once when this anonymous class is instantiated; -1 when unset
    final int RX_BUFFER_SIZE = (int) getSafeMemorySize(
        SocketBusDriver.class.getName()+".socketRxBuffer");
    final int TX_BUFFER_SIZE = (int) getSafeMemorySize(
        SocketBusDriver.class.getName()+".socketTxBuffer");
    };
}
// ----- constants ------------------------------------------------
/**
 * The default Logger for the driver.
 * <p>
 * NOTE(review): declared non-final — if nothing elsewhere in this file
 * reassigns it, it should be {@code static final}; confirm before changing.
 */
private static Logger LOGGER = Logger.getLogger(SocketBusDriver.class.getName());
// ----- data members ---------------------------------------------
/**
 * The Depot managing this driver.
 */
protected Depot m_depot;
/**
 * The driver's dependencies.
 */
protected Dependencies m_dependencies;
}
|
apache/felix-dev | 36,143 | ipojo/runtime/core-it/ipojo-core-service-dependency-test/src/test/java/org/apache/felix/ipojo/runtime/core/test/dependencies/filter/TestOptionalMultipleFilterDependencies.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.runtime.core.test.dependencies.filter;
import org.apache.felix.ipojo.ComponentInstance;
import org.apache.felix.ipojo.architecture.Architecture;
import org.apache.felix.ipojo.architecture.InstanceDescription;
import org.apache.felix.ipojo.runtime.core.test.dependencies.Common;
import org.apache.felix.ipojo.runtime.core.test.services.CheckService;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.osgi.framework.ServiceReference;
import java.util.Properties;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestOptionalMultipleFilterDependencies extends Common {
ComponentInstance instance1, instance2, instance3;
ComponentInstance fooProvider1, fooProvider2;
@Before
public void setUp() {
    try {
        // Two providers from the same factory; created stopped so each test
        // controls when a provider (and thus a matching service) appears.
        Properties prov = new Properties();
        prov.put("instance.name", "FooProvider1");
        fooProvider1 = ipojoHelper.getFactory("SimpleFilterCheckServiceProvider").createComponentInstance(prov);
        fooProvider1.stop();
        prov = new Properties();
        prov.put("instance.name", "FooProvider2");
        fooProvider2 = ipojoHelper.getFactory("SimpleFilterCheckServiceProvider").createComponentInstance(prov);
        fooProvider2.stop();
        // Subscriber1: relies solely on the filter declared in the component type.
        Properties i1 = new Properties();
        i1.put("instance.name", "Subscriber1");
        instance1 = ipojoHelper.getFactory("OptionalMultipleFilterCheckServiceSubscriber").createComponentInstance(i1);
        // Subscriber2: instance-level filter (toto=A) supplied for dependency "id2".
        Properties i2 = new Properties();
        i2.put("instance.name", "Subscriber2");
        Properties ii2 = new Properties();
        ii2.put("id2", "(toto=A)");
        i2.put("requires.filters", ii2);
        instance2 = ipojoHelper.getFactory("OptionalMultipleFilterCheckServiceSubscriber2").createComponentInstance(i2);
        // Subscriber3: instance-level filter (toto=A) supplied for dependency "id1".
        Properties i3 = new Properties();
        i3.put("instance.name", "Subscriber3");
        Properties ii3 = new Properties();
        ii3.put("id1", "(toto=A)");
        i3.put("requires.filters", ii3);
        instance3 = ipojoHelper.getFactory("OptionalMultipleFilterCheckServiceSubscriber").createComponentInstance(i3);
    } catch (Exception e) {
        // any factory/instantiation failure aborts the test with its message
        e.printStackTrace();
        fail(e.getMessage());
    }
}
/**
 * Dispose every component instance created in setUp and drop the
 * references so no state leaks between tests.
 */
@After
public void tearDown() {
    for (ComponentInstance ci : new ComponentInstance[] {
            instance1, instance2, instance3, fooProvider1, fooProvider2 }) {
        ci.dispose();
    }
    instance1 = instance2 = instance3 = null;
    fooProvider1 = fooProvider2 = null;
}
/**
 * Optional multiple dependency filtered by the component type: providers are
 * only bound once their "toto" property is toggled to match the filter via
 * check(), and the instance remains VALID throughout since the dependency
 * is optional.  Uses the deprecated-free Integer.valueOf for comparisons.
 */
@Test
public void testMultipleNotMatch() {
    instance1.start();
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance1.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    CheckService cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 1", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 1", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider1.start();
    // provider started but its "toto" property does not yet match the filter
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 2", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 2", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) getContext().getService(cs_ref);
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 3", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    assertTrue("Check service Binding - 3", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    fooProvider2.start();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 4", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    assertTrue("Check service Binding - 4", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    ServiceReference cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);
    CheckService cs2 = (CheckService) getContext().getService(cs_ref2);
    // change the value of the property toto
    cs2.check();
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 5", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(2)));
    assertTrue("Check service Binding - 5", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(2)));
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 6", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 6", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 6", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 7", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 7", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 7", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider2.start();
    cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);
    cs2 = (CheckService) getContext().getService(cs_ref2);
    // change the value of the property toto
    cs2.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 8", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 8", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 8", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 9", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 9", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 9", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    id = null;
    cs = null;
    cs2 = null;
    cs_instance = null;
    getContext().ungetService(cs_instance_ref);
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
    getContext().ungetService(cs_ref2);
}
/**
 * Both providers already match (their "toto" property is flipped via check()
 * before the subscriber starts), so the subscriber binds both immediately;
 * bindings then track further property flips and provider restarts.
 * Replaces the deprecated Integer(int) constructor with Integer.valueOf.
 */
@Test
public void testMultipleMatch() {
    fooProvider1.start();
    fooProvider2.start();
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) getContext().getService(cs_ref);
    // change the value of the property toto
    cs.check();
    ServiceReference cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);
    CheckService cs2 = (CheckService) getContext().getService(cs_ref2);
    // change the value of the property toto
    cs2.check();
    instance1.start();
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance1.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    CheckService cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 1", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(2)));
    assertTrue("Check Array size - 1", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(2)));
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 2", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 2", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 3", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 3", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider2.start();
    cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);
    cs2 = (CheckService) getContext().getService(cs_ref2);
    // change the value of the property toto
    cs2.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 4", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 4", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 5", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(2)));
    assertTrue("Check Array size - 5", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(2)));
    id = null;
    cs = null;
    cs2 = null;
    cs_instance = null;
    getContext().ungetService(cs_instance_ref);
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
    getContext().ungetService(cs_ref2);
}
/**
 * Same scenario as testMultipleNotMatch but with the filter supplied at
 * instance-creation time (Subscriber3's "requires.filters" configuration).
 * Replaces the deprecated Integer(int) constructor with Integer.valueOf.
 */
@Test
public void testMultipleNotMatchInstance() {
    instance3.start();
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance3.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance3.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    CheckService cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 1", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 1", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider1.start();
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) getContext().getService(cs_ref);
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 2", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 2", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    cs = (CheckService) getContext().getService(cs_ref);
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance3.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 3", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    assertTrue("Check service Binding - 3", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    fooProvider2.start();
    ServiceReference cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);
    CheckService cs2 = (CheckService) getContext().getService(cs_ref2);
    // change the value of the property toto
    cs2.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance3.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 4", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    assertTrue("Check service Binding - 4", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    // change the value of the property toto
    cs2.check();
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 5", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(2)));
    assertTrue("Check service Binding - 5", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(2)));
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 6", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 6", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 6", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 7", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 7", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 7", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider2.start();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 8", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 8", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 8", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 9", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 9", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 9", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    id = null;
    cs = null;
    cs2 = null;
    cs_instance = null;
    getContext().ungetService(cs_instance_ref);
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
    getContext().ungetService(cs_ref2);
}
/**
 * Instance-filtered subscriber (Subscriber3) started after both providers:
 * note both providers are bound at start here without any property flip.
 * Replaces the deprecated Integer(int) constructor with Integer.valueOf.
 */
@Test
public void testMultipleMatchInstance() {
    fooProvider1.start();
    fooProvider2.start();
    instance3.start();
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) getContext().getService(cs_ref);
    ServiceReference cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance3.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance3.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    CheckService cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 1", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(2)));
    assertTrue("Check Array size - 1", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(2)));
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 2", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 2", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 3", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 3", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider2.start();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 4", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 4", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 5", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(2)));
    assertTrue("Check Array size - 5", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(2)));
    id = null;
    cs = null;
    cs_instance = null;
    getContext().ungetService(cs_instance_ref);
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
    getContext().ungetService(cs_ref2);
}
/**
 * Subscriber2's component type declares no filter for dependency "id2"; the
 * (toto=A) filter comes only from the instance configuration.  The binding
 * sequence mirrors testMultipleNotMatchInstance.
 * Replaces the deprecated Integer(int) constructor with Integer.valueOf.
 */
@Test
public void testMultipleNotMatchInstanceWithoutFilter() {
    instance2.start();
    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance2.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);
    ServiceReference cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    CheckService cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 1", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 1", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider1.start();
    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) getContext().getService(cs_ref);
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check service Binding - 2", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 2", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    cs = (CheckService) getContext().getService(cs_ref);
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 3", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    assertTrue("Check service Binding - 3", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    fooProvider2.start();
    ServiceReference cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);
    CheckService cs2 = (CheckService) getContext().getService(cs_ref2);
    // change the value of the property toto
    cs2.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    cs_instance = (CheckService) getContext().getService(cs_instance_ref);
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 4", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    assertTrue("Check service Binding - 4", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    // change the value of the property toto
    cs2.check();
    assertTrue("Check service invocation", cs_instance.check());
    assertTrue("Check Array size - 5", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(2)));
    assertTrue("Check service Binding - 5", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(2)));
    // change the value of the property toto
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 6", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 6", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 6", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 7", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 7", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 7", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    fooProvider2.start();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 8", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 8", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(1)));
    assertTrue("Check Array size - 8", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(1)));
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 9", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service Binding - 9", ((Integer) cs_instance.getProps().get("Bind")).equals(Integer.valueOf(0)));
    assertTrue("Check Array size - 9", ((Integer) cs_instance.getProps().get("Size")).equals(Integer.valueOf(0)));
    id = null;
    cs = null;
    cs2 = null;
    cs_instance = null;
    getContext().ungetService(cs_instance_ref);
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
    getContext().ungetService(cs_ref2);
}
@Test
public void testMultipleMatchInstanceWithoutFilter() {
    // Start two providers matching the (filter-less) dependency, then the consumer.
    fooProvider1.start();
    fooProvider2.start();
    instance2.start();

    ServiceReference cs_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider1.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref);
    CheckService cs = (CheckService) getContext().getService(cs_ref);

    // Provider 2: only the reference is needed in this scenario.
    ServiceReference cs_ref2 = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), fooProvider2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_ref2);

    ServiceReference arch_ref = ipojoHelper.getServiceReferenceByName(Architecture.class.getName(), instance2.getInstanceName());
    assertNotNull("Check architecture availability", arch_ref);
    InstanceDescription id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 1", id.getState() == ComponentInstance.VALID);

    ServiceReference cs_instance_ref = ipojoHelper.getServiceReferenceByName(CheckService.class.getName(), instance2.getInstanceName());
    assertNotNull("Check CheckService availability", cs_instance_ref);
    CheckService cs_instance = (CheckService) getContext().getService(cs_instance_ref);

    // Initially both providers are bound.
    assertTrue("Check service invocation", cs_instance.check());
    assertBindAndSize(cs_instance, 2, "1");

    // check() toggles provider 1's matching property: one provider left bound.
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 2", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertBindAndSize(cs_instance, 1, "2");

    // Stopping provider 2 unbinds it as well.
    fooProvider2.stop();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 3", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertBindAndSize(cs_instance, 0, "3");

    // Restarting provider 2 re-binds it.
    fooProvider2.start();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 4", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertBindAndSize(cs_instance, 1, "4");

    // check() toggles provider 1's property back: both providers match again.
    cs.check();
    id = ((Architecture) getContext().getService(arch_ref)).getInstanceDescription();
    assertTrue("Check instance validity - 5", id.getState() == ComponentInstance.VALID);
    assertTrue("Check service invocation", cs_instance.check());
    assertBindAndSize(cs_instance, 2, "5");

    id = null;
    cs = null;
    cs_instance = null;
    getContext().ungetService(cs_instance_ref);
    getContext().ungetService(arch_ref);
    getContext().ungetService(cs_ref);
    getContext().ungetService(cs_ref2);
}

/**
 * Asserts the aggregate dependency state exposed through the consumer's
 * service properties: the "Bind" counter and the dependency array "Size"
 * must both equal {@code expected}. Uses {@link Integer#valueOf(int)}
 * instead of the deprecated {@code new Integer(int)} constructor.
 *
 * @param svc      the consumer's CheckService
 * @param expected expected value of both counters
 * @param step     scenario step number used in the assertion messages
 */
private void assertBindAndSize(CheckService svc, int expected, String step) {
    assertTrue("Check service Binding - " + step,
            ((Integer) svc.getProps().get("Bind")).equals(Integer.valueOf(expected)));
    assertTrue("Check Array size - " + step,
            ((Integer) svc.getProps().get("Size")).equals(Integer.valueOf(expected)));
}
}
|
googleapis/google-cloud-java | 36,205 | java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/ListDatabasesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1beta/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1beta;
/**
*
*
* <pre>
* Message for ListDatabases request.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1beta.ListDatabasesRequest}
*/
public final class ListDatabasesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.ListDatabasesRequest)
ListDatabasesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDatabasesRequest.newBuilder() to construct.
// Builder-based constructor; all field values are copied in by buildPartial0().
private ListDatabasesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// Default constructor: initializes string fields to proto3 defaults (empty).
private ListDatabasesRequest() {
  parent_ = "";
  pageToken_ = "";
  filter_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListDatabasesRequest();
}
// Returns the message descriptor generated from google/cloud/alloydb/v1beta/service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.alloydb.v1beta.ServiceProto
      .internal_static_google_cloud_alloydb_v1beta_ListDatabasesRequest_descriptor;
}
// Wires the descriptor to the generated message/builder classes for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.alloydb.v1beta.ServiceProto
      .internal_static_google_cloud_alloydb_v1beta_ListDatabasesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.alloydb.v1beta.ListDatabasesRequest.class,
          com.google.cloud.alloydb.v1beta.ListDatabasesRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
  // The field stores either a String or a ByteString; decode lazily and
  // cache the decoded String back into the field.
  java.lang.Object ref = parent_;
  if (!(ref instanceof java.lang.String)) {
    java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
    parent_ = decoded;
    return decoded;
  }
  return (java.lang.String) ref;
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  // Encode a cached String to its UTF-8 ByteString form and memoize it.
  java.lang.Object ref = parent_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded;
  return encoded;
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
// Plain int field accessor; 0 means "use the server default page size".
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
  // Lazily decode the ByteString representation and cache the String.
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
    pageToken_ = decoded;
    return decoded;
  }
  return (java.lang.String) ref;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
  // Encode a cached String to its UTF-8 ByteString form and memoize it.
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  pageToken_ = encoded;
  return encoded;
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
  // Lazily decode the ByteString representation and cache the String.
  java.lang.Object ref = filter_;
  if (!(ref instanceof java.lang.String)) {
    java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
    filter_ = decoded;
    return decoded;
  }
  return (java.lang.String) ref;
}
/**
*
*
* <pre>
* Optional. Filtering results.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
  // Encode a cached String to its UTF-8 ByteString form and memoize it.
  java.lang.Object ref = filter_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  filter_ = encoded;
  return encoded;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  // Memoized tri-state: 1 = initialized, 0 = not, -1 = not yet computed.
  // This message has no required fields, so the computed answer is always true.
  switch (memoizedIsInitialized) {
    case 1:
      return true;
    case 0:
      return false;
    default:
      memoizedIsInitialized = 1;
      return true;
  }
}
// Serializes in ascending field-number order; proto3 semantics skip
// default-valued (empty string / zero) fields. Unknown fields come last.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (pageSize_ != 0) {
    output.writeInt32(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the wire size; must mirror the
// field-skipping logic of writeTo() exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (pageSize_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.alloydb.v1beta.ListDatabasesRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.alloydb.v1beta.ListDatabasesRequest other =
      (com.google.cloud.alloydb.v1beta.ListDatabasesRequest) obj;
  // Field-by-field comparison, including unknown fields.
  return getParent().equals(other.getParent())
      && getPageSize() == other.getPageSize()
      && getPageToken().equals(other.getPageToken())
      && getFilter().equals(other.getFilter())
      && getUnknownFields().equals(other.getUnknownFields());
}
// Standard generated hash: seeded with the descriptor, then folds each field
// in with the (37, 53) multiplier pattern. Memoized; consistent with equals().
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
  hash = (53 * hash) + getPageSize();
  hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getPageToken().hashCode();
  hash = (37 * hash) + FILTER_FIELD_NUMBER;
  hash = (53 * hash) + getFilter().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Parses from a ByteBuffer with the default (empty) extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
// Parses from a ByteBuffer using the caller-supplied extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses from a ByteString with the default extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
// Parses from a ByteString using the caller-supplied extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses from a byte array with the default extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
// Parses from a byte array using the caller-supplied extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
// Parses a single message from an InputStream (reads to EOF, not length-delimited).
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
// Stream variant with a caller-supplied extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Parses one varint-length-delimited message from the stream.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
// Length-delimited stream variant with a caller-supplied extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
// Parses from an already-open CodedInputStream with the default registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
// CodedInputStream variant with a caller-supplied extension registry.
public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Non-static builder factory required by the Message interface.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
// Creates a builder seeded from the default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(com.google.cloud.alloydb.v1beta.ListDatabasesRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder; anything else seeds the
  // builder with this message's field values.
  if (this == DEFAULT_INSTANCE) {
    return new Builder();
  }
  return new Builder().mergeFrom(this);
}
// Internal factory used by parent builders for nested-message fields.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  return new Builder(parent);
}
/**
*
*
* <pre>
* Message for ListDatabases request.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1beta.ListDatabasesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.ListDatabasesRequest)
com.google.cloud.alloydb.v1beta.ListDatabasesRequestOrBuilder {
// Same descriptor as the enclosing message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.alloydb.v1beta.ServiceProto
      .internal_static_google_cloud_alloydb_v1beta_ListDatabasesRequest_descriptor;
}
// Wires the descriptor to the generated message/builder classes for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.alloydb.v1beta.ServiceProto
      .internal_static_google_cloud_alloydb_v1beta_ListDatabasesRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.alloydb.v1beta.ListDatabasesRequest.class,
          com.google.cloud.alloydb.v1beta.ListDatabasesRequest.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1beta.ListDatabasesRequest.newBuilder()
private Builder() {}
// Builder attached to a parent builder (used for nested-message fields).
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
// Resets every field to its proto3 default and clears all has-bits.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  parent_ = "";
  pageSize_ = 0;
  pageToken_ = "";
  filter_ = "";
  return this;
}
// Descriptor of the message type this builder produces.
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.alloydb.v1beta.ServiceProto
      .internal_static_google_cloud_alloydb_v1beta_ListDatabasesRequest_descriptor;
}
// Default (all-fields-default) instance of the built message type.
@java.lang.Override
public com.google.cloud.alloydb.v1beta.ListDatabasesRequest getDefaultInstanceForType() {
  return com.google.cloud.alloydb.v1beta.ListDatabasesRequest.getDefaultInstance();
}
// Builds and verifies initialization (vacuously true here: no required fields).
@java.lang.Override
public com.google.cloud.alloydb.v1beta.ListDatabasesRequest build() {
  com.google.cloud.alloydb.v1beta.ListDatabasesRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
// Builds without the initialization check; copies only fields whose has-bit is set.
@java.lang.Override
public com.google.cloud.alloydb.v1beta.ListDatabasesRequest buildPartial() {
  com.google.cloud.alloydb.v1beta.ListDatabasesRequest result =
      new com.google.cloud.alloydb.v1beta.ListDatabasesRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Copies builder fields into the result. Bit flags map to field numbers:
// 0x1 = parent, 0x2 = page_size, 0x4 = page_token, 0x8 = filter.
private void buildPartial0(com.google.cloud.alloydb.v1beta.ListDatabasesRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.parent_ = parent_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.pageSize_ = pageSize_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.pageToken_ = pageToken_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.filter_ = filter_;
  }
}
// Delegates to the GeneratedMessageV3.Builder deep-copy implementation.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
// Reflective field setter; delegates to the base class.
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
// Reflective field clearer; delegates to the base class.
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
// Reflective oneof clearer; delegates to the base class (no oneofs in this message).
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
// Reflective repeated-field element setter; delegates to the base class.
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
// Reflective repeated-field appender; delegates to the base class.
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for the exact generated type; otherwise fall back to the
  // reflective merge in the base class.
  if (other instanceof com.google.cloud.alloydb.v1beta.ListDatabasesRequest) {
    return mergeFrom((com.google.cloud.alloydb.v1beta.ListDatabasesRequest) other);
  }
  super.mergeFrom(other);
  return this;
}
// Typed merge: proto3 semantics — only non-default fields of `other`
// overwrite this builder's values; unknown fields are concatenated.
public Builder mergeFrom(com.google.cloud.alloydb.v1beta.ListDatabasesRequest other) {
  if (other == com.google.cloud.alloydb.v1beta.ListDatabasesRequest.getDefaultInstance())
    return this;
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (other.getPageSize() != 0) {
    setPageSize(other.getPageSize());
  }
  if (!other.getPageToken().isEmpty()) {
    pageToken_ = other.pageToken_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (!other.getFilter().isEmpty()) {
    filter_ = other.filter_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
// No required fields, so a builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Wire-format parse loop. Tags encode (field_number << 3) | wire_type:
// 10 = parent (field 1, length-delimited), 16 = page_size (field 2, varint),
// 26 = page_token (field 3), 34 = filter (field 4). Tag 0 and end-group tags
// terminate the loop; anything else is preserved as an unknown field.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            parent_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 16:
          {
            pageSize_ = input.readInt32();
            bitField0_ |= 0x00000002;
            break;
          } // case 16
        case 26:
          {
            pageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        case 34:
          {
            filter_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000008;
            break;
          } // case 34
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
  // The field stores either a String or a ByteString; decode lazily and
  // cache the decoded String back into the field.
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  parent_ = decoded;
  return decoded;
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
  // Encode a cached String to its UTF-8 ByteString form and memoize it.
  java.lang.Object ref = parent_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded;
  return encoded;
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
  // Rejects null exactly like the generated explicit check (bare NPE).
  java.util.Objects.requireNonNull(value);
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
// Restores parent to its default ("") and drops the has-bit.
public Builder clearParent() {
  parent_ = getDefaultInstance().getParent();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. Parent value for ListDatabasesRequest.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  // Rejects null exactly like the generated explicit check (bare NPE),
  // then validates the bytes are well-formed UTF-8 (proto3 string contract).
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
// Plain int field accessor; 0 means "use the server default page size".
@java.lang.Override
public int getPageSize() {
  return pageSize_;
}
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
// Sets page_size and marks its has-bit (0x2).
public Builder setPageSize(int value) {
  pageSize_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. The maximum number of databases to return. The service may return
* fewer than this value. If unspecified, 2000 is the default page_size. The
* max value of page_size will be 4000, values above max will be coerced to
* max.
* </pre>
*
* <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
// Restores page_size to its default (0) and drops the has-bit.
public Builder clearPageSize() {
  bitField0_ = (bitField0_ & ~0x00000002);
  pageSize_ = 0;
  onChanged();
  return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
  // Lazily decode the ByteString representation and cache the String.
  java.lang.Object ref = pageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  java.lang.String decoded = ((com.google.protobuf.ByteString) ref).toStringUtf8();
  pageToken_ = decoded;
  return decoded;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
  // Encode a cached String to its UTF-8 ByteString form and memoize it.
  java.lang.Object ref = pageToken_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  pageToken_ = encoded;
  return encoded;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
  // Rejects null exactly like the generated explicit check (bare NPE).
  java.util.Objects.requireNonNull(value);
  pageToken_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
// Restores page_token to its default ("") and drops the has-bit.
public Builder clearPageToken() {
  pageToken_ = getDefaultInstance().getPageToken();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListDatabases` call.
* This should be provided to retrieve the subsequent page.
* This field is currently not supported, its value will be ignored if passed.
* </pre>
*
* <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
  // Rejects null exactly like the generated explicit check (bare NPE),
  // then validates the bytes are well-formed UTF-8 (proto3 string contract).
  java.util.Objects.requireNonNull(value);
  checkByteStringIsUtf8(value);
  pageToken_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
    // Holds either a java.lang.String or a com.google.protobuf.ByteString; the generated
    // accessors below lazily convert between the two forms and cache the result.
    private java.lang.Object filter_ = "";
    /**
     * Returns the filter as a String, converting (and caching) from bytes if necessary.
     *
     * <pre>
     * Optional. Filtering results.
     * This field is currently not supported, its value will be ignored if passed.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent calls avoid re-decoding.
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Returns the filter as a UTF-8 ByteString, converting (and caching) from String if necessary.
     *
     * <pre>
     * Optional. Filtering results.
     * This field is currently not supported, its value will be ignored if passed.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString so subsequent calls avoid re-encoding.
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets the filter expression.
     *
     * <pre>
     * Optional. Filtering results.
     * This field is currently not supported, its value will be ignored if passed.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      // Mark filter (field number 4) as explicitly set via the builder's bit field.
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     * Clears the filter back to its proto3 default (the empty string).
     *
     * <pre>
     * Optional. Filtering results.
     * This field is currently not supported, its value will be ignored if passed.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      // Restore the default value taken from the default (empty) message instance.
      filter_ = getDefaultInstance().getFilter();
      // Clear the has-bit for filter (field number 4).
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     * Sets the filter from its UTF-8 encoded byte representation.
     *
     * <pre>
     * Optional. Filtering results.
     * This field is currently not supported, its value will be ignored if passed.
     * </pre>
     *
     * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects byte sequences that are not valid UTF-8, as required for proto3 strings.
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /** Replaces the builder's unknown fields; delegates to the generated superclass. */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    /** Merges the given unknown fields into the builder's; delegates to the generated superclass. */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.ListDatabasesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.ListDatabasesRequest)
  // Singleton default (all-fields-default) instance, created eagerly at class load.
  private static final com.google.cloud.alloydb.v1beta.ListDatabasesRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.ListDatabasesRequest();
  }

  /** Returns the shared immutable default instance of this message type. */
  public static com.google.cloud.alloydb.v1beta.ListDatabasesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that builds messages from the wire format. On any failure it attaches the
  // partially-parsed message to the thrown InvalidProtocolBufferException so callers can
  // still inspect what was read before the error.
  private static final com.google.protobuf.Parser<ListDatabasesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListDatabasesRequest>() {
        @java.lang.Override
        public ListDatabasesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListDatabasesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListDatabasesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.v1beta.ListDatabasesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/fluss | 36,544 | fluss-common/src/main/java/org/apache/fluss/utils/concurrent/FutureUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.fluss.utils.concurrent;
import org.apache.fluss.annotation.VisibleForTesting;
import org.apache.fluss.utils.ExceptionUtils;
import org.apache.fluss.utils.FatalExitExceptionHandler;
import org.apache.fluss.utils.function.RunnableWithException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Optional;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RunnableFuture;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.apache.fluss.utils.Preconditions.checkNotNull;
/* This file is based on source code of Apache Flink Project (https://flink.apache.org/), licensed by the Apache
* Software Foundation (ASF) under the Apache License, Version 2.0. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. */
/** A collection of utilities that expand the usage of {@link CompletableFuture}. */
public class FutureUtils {
    private static final Logger LOG = LoggerFactory.getLogger(FutureUtils.class);

    /** No instantiation: this class only hosts static utility methods. */
    private FutureUtils() {}

    // Shared already-completed Void future handed out by completedVoidFuture(). Sharing is safe
    // as long as callers only read its completed state and never complete/obtrude it.
    private static final CompletableFuture<Void> COMPLETED_VOID_FUTURE =
            CompletableFuture.completedFuture(null);

    /**
     * Returns a completed future of type {@link Void}.
     *
     * @return a completed future of type {@link Void}
     */
    public static CompletableFuture<Void> completedVoidFuture() {
        return COMPLETED_VOID_FUTURE;
    }
/**
* Fakes asynchronous execution by immediately executing the operation and completing the
* supplied future either normally or exceptionally.
*
* @param operation to executed
* @param <T> type of the result
*/
public static <T> void completeFromCallable(
CompletableFuture<T> future, Callable<T> operation) {
try {
future.complete(operation.call());
} catch (Exception e) {
future.completeExceptionally(e);
}
}
/** Runnable to complete the given future with a {@link TimeoutException}. */
private static final class Timeout implements Runnable {
private final CompletableFuture<?> future;
private final String timeoutMsg;
private Timeout(CompletableFuture<?> future, @Nullable String timeoutMsg) {
this.future = checkNotNull(future);
this.timeoutMsg = timeoutMsg;
}
@Override
public void run() {
future.completeExceptionally(new TimeoutException(timeoutMsg));
}
}
/**
* Delay scheduler used to timeout futures.
*
* <p>This class creates a singleton scheduler used to run the provided actions.
*/
private enum Delayer {
;
static final ScheduledThreadPoolExecutor DELAYER =
new ScheduledThreadPoolExecutor(
1, new ExecutorThreadFactory("fluss-completable-future-delay-scheduler"));
/**
* Delay the given action by the given delay.
*
* @param runnable to execute after the given delay
* @param delay after which to execute the runnable
* @param timeUnit time unit of the delay
* @return Future of the scheduled action
*/
private static ScheduledFuture<?> delay(Runnable runnable, long delay, TimeUnit timeUnit) {
checkNotNull(runnable);
checkNotNull(timeUnit);
return DELAYER.schedule(runnable, delay, timeUnit);
}
}
/**
* Times the given future out after the timeout.
*
* @param future to time out
* @param timeout after which the given future is timed out
* @param timeUnit time unit of the timeout
* @param timeoutMsg timeout message for exception
* @param <T> type of the given future
* @return The timeout enriched future
*/
public static <T> CompletableFuture<T> orTimeout(
CompletableFuture<T> future,
long timeout,
TimeUnit timeUnit,
@Nullable String timeoutMsg) {
return orTimeout(future, timeout, timeUnit, Executors.directExecutor(), timeoutMsg);
}
/**
* Times the given future out after the timeout.
*
* @param future to time out
* @param timeout after which the given future is timed out
* @param timeUnit time unit of the timeout
* @param timeoutFailExecutor executor that will complete the future exceptionally after the
* timeout is reached
* @param timeoutMsg timeout message for exception
* @param <T> type of the given future
* @return The timeout enriched future
*/
public static <T> CompletableFuture<T> orTimeout(
CompletableFuture<T> future,
long timeout,
TimeUnit timeUnit,
Executor timeoutFailExecutor,
@Nullable String timeoutMsg) {
if (!future.isDone()) {
final ScheduledFuture<?> timeoutFuture =
Delayer.delay(
() -> timeoutFailExecutor.execute(new Timeout(future, timeoutMsg)),
timeout,
timeUnit);
future.whenComplete(
(T value, Throwable throwable) -> {
if (!timeoutFuture.isDone()) {
timeoutFuture.cancel(false);
}
});
}
return future;
}
// ------------------------------------------------------------------------
// Future actions
// ------------------------------------------------------------------------
/**
* Run the given {@code RunnableFuture} if it is not done, and then retrieves its result.
*
* @param future to run if not done and get
* @param <T> type of the result
* @return the result after running the future
* @throws ExecutionException if a problem occurred
* @throws InterruptedException if the current thread has been interrupted
*/
public static <T> T runIfNotDoneAndGet(RunnableFuture<T> future)
throws ExecutionException, InterruptedException {
if (null == future) {
return null;
}
if (!future.isDone()) {
future.run();
}
return future.get();
}
// ------------------------------------------------------------------------
// composing futures
// ------------------------------------------------------------------------
    /**
     * Run the given asynchronous action after the completion of the given future. The given future
     * can be completed normally or exceptionally. In case of an exceptional completion, the
     * asynchronous action's exception will be added to the initial exception.
     *
     * @param future to wait for its completion
     * @param composedAction asynchronous action which is triggered after the future's completion
     * @return Future which is completed after the asynchronous action has completed. This future
     *     can contain an exception if an error occurred in the given future or asynchronous action.
     */
    public static CompletableFuture<Void> composeAfterwards(
            CompletableFuture<?> future, Supplier<CompletableFuture<?>> composedAction) {
        final CompletableFuture<Void> resultFuture = new CompletableFuture<>();

        future.whenComplete(
                (Object outerIgnored, Throwable outerThrowable) -> {
                    // The composed action runs regardless of whether the first future
                    // succeeded or failed.
                    final CompletableFuture<?> composedActionFuture = composedAction.get();

                    composedActionFuture.whenComplete(
                            (Object innerIgnored, Throwable innerThrowable) -> {
                                if (innerThrowable != null) {
                                    // Both stages may have failed; combine via firstOrSuppressed
                                    // so one exception is primary and the other attached as
                                    // suppressed.
                                    resultFuture.completeExceptionally(
                                            ExceptionUtils.firstOrSuppressed(
                                                    innerThrowable, outerThrowable));
                                } else if (outerThrowable != null) {
                                    // Only the original future failed.
                                    resultFuture.completeExceptionally(outerThrowable);
                                } else {
                                    resultFuture.complete(null);
                                }
                            });
                });

        return resultFuture;
    }
    /**
     * Creates a future that is complete once multiple other futures completed. The future fails
     * (completes exceptionally) once one of the futures in the conjunction fails. Upon successful
     * completion, the future returns the collection of the futures' results.
     *
     * <p>The ConjunctFuture gives access to how many Futures in the conjunction have already
     * completed successfully, via {@link ConjunctFuture#getNumFuturesCompleted()}.
     *
     * @param futures The futures that make up the conjunction. No null entries are allowed.
     * @return The ConjunctFuture that completes once all given futures are complete (or one fails).
     */
    public static <T> ConjunctFuture<Collection<T>> combineAll(
            Collection<? extends CompletableFuture<? extends T>> futures) {
        checkNotNull(futures, "futures");

        return new ResultConjunctFuture<>(futures);
    }

    /**
     * Creates a future that is complete once all of the given futures have completed. The future
     * fails (completes exceptionally) once one of the given futures fails.
     *
     * <p>The ConjunctFuture gives access to how many Futures have already completed successfully,
     * via {@link ConjunctFuture#getNumFuturesCompleted()}.
     *
     * @param futures The futures to wait on. No null entries are allowed.
     * @return The WaitingFuture that completes once all given futures are complete (or one fails).
     */
    public static ConjunctFuture<Void> waitForAll(
            Collection<? extends CompletableFuture<?>> futures) {
        checkNotNull(futures, "futures");

        // Raw-typed construction: the element type is irrelevant because the per-future
        // completion action is a no-op.
        //noinspection unchecked,rawtypes
        return new WaitingConjunctFuture(futures, (ignore, throwable) -> {});
    }

    /**
     * Creates a future that is complete once all of the given futures have completed. The future
     * fails (completes exceptionally) once one of the given futures fails.
     *
     * <p>The ConjunctFuture gives access to how many Futures have already completed successfully,
     * via {@link ConjunctFuture#getNumFuturesCompleted()}.
     *
     * @param futures The futures to wait on. No null entries are allowed.
     * @param completeAction action invoked with each individual future's outcome
     * @return The WaitingFuture that completes once all given futures are complete (or one fails).
     */
    public static <T> ConjunctFuture<Void> waitForAll(
            Collection<? extends CompletableFuture<T>> futures,
            BiConsumer<T, Throwable> completeAction) {
        checkNotNull(futures, "futures");
        return new WaitingConjunctFuture<>(futures, completeAction);
    }

    /**
     * Creates a {@link ConjunctFuture} which is only completed after all given futures have
     * completed. Unlike {@link FutureUtils#waitForAll(Collection)}, the resulting future won't be
     * completed directly if one of the given futures is completed exceptionally. Instead, all
     * occurring exception will be collected and combined to a single exception. If at least one
     * exception occurs, then the resulting future will be completed exceptionally.
     *
     * @param futuresToComplete futures to complete
     * @return Future which is completed after all given futures have been completed.
     */
    public static ConjunctFuture<Void> completeAll(
            Collection<? extends CompletableFuture<?>> futuresToComplete) {
        // Raw-typed construction mirrors waitForAll(Collection); the no-op action ignores values.
        //noinspection unchecked,rawtypes
        return new CompletionConjunctFuture(futuresToComplete, (ignored, throwable) -> {});
    }

    /**
     * Creates a {@link ConjunctFuture} which is only completed after all given futures have
     * completed. Unlike {@link FutureUtils#waitForAll(Collection)}, the resulting future won't be
     * completed directly if one of the given futures is completed exceptionally. Instead, all
     * occurring exception will be collected and combined to a single exception. If at least one
     * exception occurs, then the resulting future will be completed exceptionally.
     *
     * @param futuresToComplete futures to complete
     * @param completeAction action to be executed after the completion of each future
     * @return Future which is completed after all given futures have been completed.
     */
    public static <T> ConjunctFuture<Void> completeAll(
            Collection<? extends CompletableFuture<T>> futuresToComplete,
            BiConsumer<T, Throwable> completeAction) {
        return new CompletionConjunctFuture<>(futuresToComplete, completeAction);
    }
    /**
     * Run the given action after the completion of the given future. The given future can be
     * completed normally or exceptionally. In case of an exceptional completion, the action's
     * exception will be added to the initial exception.
     *
     * <p>The action runs on the thread completing the future (direct executor).
     *
     * @param future to wait for its completion
     * @param runnable action which is triggered after the future's completion
     * @return Future which is completed after the action has completed. This future can contain an
     *     exception, if an error occurred in the given future or action.
     */
    public static CompletableFuture<Void> runAfterwards(
            CompletableFuture<?> future, RunnableWithException runnable) {
        return runAfterwardsAsync(future, runnable, Executors.directExecutor());
    }

    /**
     * Run the given action after the completion of the given future. The given future can be
     * completed normally or exceptionally. In case of an exceptional completion, the action's
     * exception will be added to the initial exception.
     *
     * <p>The action runs asynchronously on the {@link ForkJoinPool#commonPool()}.
     *
     * @param future to wait for its completion
     * @param runnable action which is triggered after the future's completion
     * @return Future which is completed after the action has completed. This future can contain an
     *     exception, if an error occurred in the given future or action.
     */
    public static CompletableFuture<Void> runAfterwardsAsync(
            CompletableFuture<?> future, RunnableWithException runnable) {
        return runAfterwardsAsync(future, runnable, ForkJoinPool.commonPool());
    }
/**
* Run the given action after the completion of the given future. The given future can be
* completed normally or exceptionally. In case of an exceptional completion the action's
* exception will be added to the initial exception.
*
* @param future to wait for its completion
* @param runnable action which is triggered after the future's completion
* @param executor to run the given action
* @return Future which is completed after the action has completed. This future can contain an
* exception, if an error occurred in the given future or action.
*/
public static CompletableFuture<Void> runAfterwardsAsync(
CompletableFuture<?> future, RunnableWithException runnable, Executor executor) {
final CompletableFuture<Void> resultFuture = new CompletableFuture<>();
future.whenCompleteAsync(
(Object ignored, Throwable throwable) -> {
try {
runnable.run();
} catch (Throwable e) {
throwable = ExceptionUtils.firstOrSuppressed(e, throwable);
}
if (throwable != null) {
resultFuture.completeExceptionally(throwable);
} else {
resultFuture.complete(null);
}
},
executor);
return resultFuture;
}
    /**
     * A future that is complete once multiple other futures completed. The futures are not
     * necessarily of the same type. The ConjunctFuture fails (completes exceptionally) once one of
     * the Futures in the conjunction fails.
     *
     * <p>The advantage of using the ConjunctFuture over chaining all the futures (such as via
     * {@link CompletableFuture#thenCombine(CompletionStage, BiFunction)}) is that ConjunctFuture
     * also tracks how many of the Futures are already complete.
     */
    public abstract static class ConjunctFuture<T> extends CompletableFuture<T> {

        /**
         * Gets the total number of Futures in the conjunction.
         *
         * @return The total number of Futures in the conjunction.
         */
        public abstract int getNumFuturesTotal();

        /**
         * Gets the number of Futures in the conjunction that are already complete.
         *
         * @return The number of Futures in the conjunction that are already complete
         */
        public abstract int getNumFuturesCompleted();
    }
    /**
     * The implementation of the {@link ConjunctFuture} which returns its Futures' result as a
     * collection.
     */
    private static class ResultConjunctFuture<T> extends ConjunctFuture<Collection<T>> {

        /** The total number of futures in the conjunction. */
        private final int numTotal;

        /** The number of futures in the conjunction that are already complete. */
        private final AtomicInteger numCompleted = new AtomicInteger(0);

        /** The set of collected results so far. */
        private final T[] results;

        /**
         * The function that is attached to all futures in the conjunction. Once a future is
         * complete, this function tracks the completion or fails the conjunct. A single failed
         * input future fails the conjunct immediately, without waiting for the others.
         */
        private void handleCompletedFuture(int index, T value, Throwable throwable) {
            if (throwable != null) {
                completeExceptionally(throwable);
            } else {
                /**
                 * This {@link #results} update itself is not synchronised in any way and it's fine
                 * because:
                 *
                 * <ul>
                 *   <li>There is a happens-before relationship for each thread (that is completing
                 *       the future) between setting {@link #results} and incrementing {@link
                 *       #numCompleted}.
                 *   <li>Each thread is updating uniquely different field of the {@link #results}
                 *       array.
                 *   <li>There is a happens-before relationship between all of the writing threads
                 *       and the last one thread (thanks to the {@code
                 *       numCompleted.incrementAndGet() == numTotal} check.
                 *   <li>The last thread will be completing the future, so it has transitively
                 *       happens-before relationship with all of preceding updated/writes to {@link
                 *       #results}.
                 *   <li>{@link AtomicInteger#incrementAndGet} is an equivalent of both volatile
                 *       read & write
                 * </ul>
                 */
                results[index] = value;

                if (numCompleted.incrementAndGet() == numTotal) {
                    complete(Arrays.asList(results));
                }
            }
        }

        @SuppressWarnings("unchecked")
        ResultConjunctFuture(Collection<? extends CompletableFuture<? extends T>> resultFutures) {
            this.numTotal = resultFutures.size();
            results = (T[]) new Object[numTotal];

            if (resultFutures.isEmpty()) {
                complete(Collections.emptyList());
            } else {
                // Each input future writes its result to a dedicated slot so the output
                // collection preserves the iteration order of the input collection.
                int counter = 0;
                for (CompletableFuture<? extends T> future : resultFutures) {
                    final int index = counter;
                    counter++;
                    future.whenComplete(
                            (value, throwable) -> handleCompletedFuture(index, value, throwable));
                }
            }
        }

        @Override
        public int getNumFuturesTotal() {
            return numTotal;
        }

        @Override
        public int getNumFuturesCompleted() {
            return numCompleted.get();
        }
    }
/**
* Implementation of the {@link ConjunctFuture} interface which waits only for the completion of
* its futures and does not return their values.
*/
private static final class WaitingConjunctFuture<T> extends ConjunctFuture<Void> {
/** Number of completed futures. */
private final AtomicInteger numCompleted = new AtomicInteger(0);
/** Total number of futures to wait on. */
private final int numTotal;
private final BiConsumer<T, Throwable> completeAction;
/**
* Method which increments the atomic completion counter and completes or fails the
* WaitingFutureImpl.
*/
private void handleCompletedFuture(T value, Throwable throwable) {
try {
completeAction.accept(value, throwable);
} catch (Exception e) {
// ignore
}
if (throwable == null) {
if (numTotal == numCompleted.incrementAndGet()) {
complete(null);
}
} else {
completeExceptionally(throwable);
}
}
private WaitingConjunctFuture(
Collection<? extends CompletableFuture<T>> futures,
BiConsumer<T, Throwable> completeAction) {
this.numTotal = futures.size();
this.completeAction = completeAction;
if (futures.isEmpty()) {
complete(null);
} else {
for (CompletableFuture<T> future : futures) {
future.whenComplete(this::handleCompletedFuture);
}
}
}
@Override
public int getNumFuturesTotal() {
return numTotal;
}
@Override
public int getNumFuturesCompleted() {
return numCompleted.get();
}
}
/**
* {@link ConjunctFuture} implementation which is completed after all the given futures have
* been completed. Exceptional completions of the input futures will be recorded but it won't
* trigger the early completion of this future.
*/
private static final class CompletionConjunctFuture<T> extends ConjunctFuture<Void> {
private final Object lock = new Object();
private final int numFuturesTotal;
private final BiConsumer<T, Throwable> completeAction;
private int futuresCompleted;
private Throwable globalThrowable;
private CompletionConjunctFuture(
Collection<? extends CompletableFuture<T>> futuresToComplete,
BiConsumer<T, Throwable> completeAction) {
this.numFuturesTotal = futuresToComplete.size();
this.completeAction = completeAction;
futuresCompleted = 0;
globalThrowable = null;
if (futuresToComplete.isEmpty()) {
complete(null);
} else {
for (CompletableFuture<T> completableFuture : futuresToComplete) {
completableFuture.whenComplete(this::completeFuture);
}
}
}
private void completeFuture(T value, Throwable throwable) {
synchronized (lock) {
try {
completeAction.accept(value, throwable);
} catch (Exception e) {
// ignore
}
futuresCompleted++;
if (throwable != null) {
globalThrowable = ExceptionUtils.firstOrSuppressed(throwable, globalThrowable);
}
if (futuresCompleted == numFuturesTotal) {
if (globalThrowable != null) {
completeExceptionally(globalThrowable);
} else {
complete(null);
}
}
}
}
@Override
public int getNumFuturesTotal() {
return numFuturesTotal;
}
@Override
public int getNumFuturesCompleted() {
synchronized (lock) {
return futuresCompleted;
}
}
}
// ------------------------------------------------------------------------
// Helper methods
// ------------------------------------------------------------------------
/**
* Returns an exceptionally completed {@link CompletableFuture}.
*
* @param cause to complete the future with
* @param <T> type of the future
* @return An exceptionally completed CompletableFuture
*/
public static <T> CompletableFuture<T> completedExceptionally(Throwable cause) {
CompletableFuture<T> result = new CompletableFuture<>();
result.completeExceptionally(cause);
return result;
}
/**
* Returns a future which is completed when {@link RunnableWithException} is finished.
*
* @param runnable represents the task
* @param executor to execute the runnable
* @return Future which is completed when runnable is finished
*/
public static CompletableFuture<Void> runAsync(
RunnableWithException runnable, Executor executor) {
return CompletableFuture.runAsync(
() -> {
try {
runnable.run();
} catch (Throwable e) {
throw new CompletionException(e);
}
},
executor);
}
    /**
     * Asserts that the given {@link CompletableFuture} is not completed exceptionally. If the
     * future is completed exceptionally, then it will call the {@link FatalExitExceptionHandler}.
     *
     * @param completableFuture to assert for no exceptions
     */
    public static void assertNoException(CompletableFuture<?> completableFuture) {
        handleUncaughtException(completableFuture, FatalExitExceptionHandler.INSTANCE);
    }

    /**
     * Checks that the given {@link CompletableFuture} is not completed exceptionally. If the future
     * is completed exceptionally, then it will call the given uncaught exception handler.
     *
     * @param completableFuture to assert for no exceptions
     * @param uncaughtExceptionHandler to call if the future is completed exceptionally
     */
    public static void handleUncaughtException(
            CompletableFuture<?> completableFuture,
            Thread.UncaughtExceptionHandler uncaughtExceptionHandler) {
        handleUncaughtException(
                completableFuture, uncaughtExceptionHandler, FatalExitExceptionHandler.INSTANCE);
    }

    @VisibleForTesting
    static void handleUncaughtException(
            CompletableFuture<?> completableFuture,
            Thread.UncaughtExceptionHandler uncaughtExceptionHandler,
            Thread.UncaughtExceptionHandler fatalErrorHandler) {
        checkNotNull(completableFuture)
                .whenComplete(
                        (ignored, throwable) -> {
                            if (throwable != null) {
                                final Thread currentThread = Thread.currentThread();
                                try {
                                    uncaughtExceptionHandler.uncaughtException(
                                            currentThread, throwable);
                                } catch (Throwable t) {
                                    // The handler itself failed: escalate to the fatal handler,
                                    // keeping both the handler failure (cause) and the original
                                    // future failure (suppressed) attached.
                                    final RuntimeException errorHandlerException =
                                            new IllegalStateException(
                                                    "An error occurred while executing the error handling for a "
                                                            + throwable.getClass().getSimpleName()
                                                            + ".",
                                                    t);
                                    errorHandlerException.addSuppressed(throwable);
                                    fatalErrorHandler.uncaughtException(
                                            currentThread, errorHandlerException);
                                }
                            }
                        });
    }
/**
* Forwards the value from the source future to the target future using the provided executor.
*
* @param source future to forward the value from
* @param target future to forward the value to
* @param executor executor to forward the source value to the target future
* @param <T> type of the value
*/
public static <T> void forwardAsync(
CompletableFuture<T> source, CompletableFuture<T> target, Executor executor) {
source.whenCompleteAsync(forwardTo(target), executor);
}
private static <T> BiConsumer<T, Throwable> forwardTo(CompletableFuture<T> target) {
return (value, throwable) -> doForward(value, throwable, target);
}
/**
* Completes the given future with either the given value or throwable, depending on which
* parameter is not null.
*
* @param value value with which the future should be completed
* @param throwable throwable with which the future should be completed exceptionally
* @param target future to complete
* @param <T> completed future
*/
public static <T> void doForward(
@Nullable T value, @Nullable Throwable throwable, CompletableFuture<T> target) {
if (throwable != null) {
target.completeExceptionally(throwable);
} else {
target.complete(value);
}
}
public static Throwable unwrapCompletionException(Throwable t) {
if (t instanceof CompletionException) {
return unwrapCompletionException(t.getCause());
} else {
return t;
}
}
/**
* Core method to execute async-to-sync conversion with unified exception handling. This method
* contains all the common exception handling logic.
*
* @param futureSupplier supplier that provides the CompletableFuture
* @param operationName name of the operation for error messages
* @param <T> the type of the result
* @return the result directly from the CompletableFuture
* @throws Exception if any error occurs during the operation
*/
private static <T> T asyncToSyncInternal(
Supplier<CompletableFuture<T>> futureSupplier, String operationName) throws Exception {
try {
return futureSupplier.get().get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Interrupted while waiting for " + operationName, e);
} catch (ExecutionException e) {
Throwable cause = e.getCause();
if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
} else if (cause instanceof Exception) {
throw (Exception) cause;
} else {
throw new RuntimeException("Unexpected error getting " + operationName, cause);
}
}
}
/**
* Template method to convert async CompletableFuture to synchronous Optional result.
*
* @param futureSupplier supplier that provides the CompletableFuture
* @param operationName name of the operation for error messages
* @param <T> the type of the result
* @return Optional containing the result, or empty if the result is null
* @throws Exception if any error occurs during the operation
*/
public static <T> Optional<T> asyncToSync(
Supplier<CompletableFuture<T>> futureSupplier, String operationName) throws Exception {
T result = asyncToSyncInternal(futureSupplier, operationName);
return Optional.ofNullable(result);
}
/**
* Template method to convert async CompletableFuture to synchronous Optional result with
* transformation.
*
* @param futureSupplier supplier that provides the CompletableFuture
* @param transformer function to transform the result before wrapping in Optional
* @param operationName name of the operation for error messages
* @param <T> the type of the future result
* @param <R> the type of the final result
* @return Optional containing the transformed result, or empty if the result is null
* @throws Exception if any error occurs during the operation
*/
public static <T, R> Optional<R> asyncToSync(
Supplier<CompletableFuture<T>> futureSupplier,
Function<T, R> transformer,
String operationName)
throws Exception {
T result = asyncToSyncInternal(futureSupplier, operationName);
R transformedResult = result == null ? null : transformer.apply(result);
return Optional.ofNullable(transformedResult);
}
  /**
   * Template method to convert async CompletableFuture to synchronous result directly (not
   * wrapped in Optional).
   *
   * <p>Unlike the {@code asyncToSync} variants, this deliberately exposes the raw (possibly
   * null) value to the caller.
   *
   * @param futureSupplier supplier that provides the CompletableFuture
   * @param operationName name of the operation for error messages
   * @param <T> the type of the result
   * @return the result directly (can be null)
   * @throws Exception if any error occurs during the operation
   */
  public static <T> T asyncToSyncDirect(
      Supplier<CompletableFuture<T>> futureSupplier, String operationName) throws Exception {
    return asyncToSyncInternal(futureSupplier, operationName);
  }
  /**
   * Wraps a Runnable so that throwables are caught and logged when a Runnable is run.
   *
   * <p>The main usecase for this method is to be used in {@link
   * java.util.concurrent.ScheduledExecutorService#scheduleAtFixedRate(Runnable, long, long,
   * TimeUnit)} calls to ensure that the scheduled task doesn't get cancelled as a result of an
   * uncaught exception.
   *
   * @param runnable The runnable to wrap
   * @return a wrapped Runnable
   */
  public static Runnable catchingAndLoggingThrowables(Runnable runnable) {
    // Delegates to the private decorator below; the wrapper never propagates throwables.
    return new CatchingAndLoggingRunnable(runnable);
  }
private static final class CatchingAndLoggingRunnable implements Runnable {
private final Runnable runnable;
private CatchingAndLoggingRunnable(Runnable runnable) {
this.runnable = runnable;
}
@Override
public void run() {
try {
runnable.run();
} catch (Throwable t) {
LOG.error("Unexpected throwable caught", t);
}
}
}
}
|
apache/solr | 35,123 | solr/core/src/test/org/apache/solr/schema/CurrencyFieldTypeTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.schema;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import java.util.Arrays;
import java.util.Currency;
import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.lucene.index.IndexableField;
import org.apache.solr.SolrTestCaseJ4;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.core.SolrCore;
import org.apache.solr.util.RTimer;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
/** Tests CurrencyField and CurrencyFieldType. */
public class CurrencyFieldTypeTest extends SolrTestCaseJ4 {
private final String fieldName;
private final Class<? extends ExchangeRateProvider> expectedProviderClass;
  /**
   * @param fieldName schema field under test for this parameterized run
   * @param expectedProviderClass exchange-rate provider class the field type is configured with
   */
  public CurrencyFieldTypeTest(
      String fieldName, Class<? extends ExchangeRateProvider> expectedProviderClass) {
    this.fieldName = fieldName;
    this.expectedProviderClass = expectedProviderClass;
  }
@ParametersFactory
public static Iterable<Object[]> parameters() {
return Arrays.asList(
new Object[][] {
{"amount", FileExchangeRateProvider.class}, // CurrencyFieldType
{"mock_amount", MockExchangeRateProvider.class}, // CurrencyFieldType
{"oer_amount", OpenExchangeRatesOrgProvider.class} // CurrencyFieldType
});
}
/** "Assumes" that the specified list of currency codes are supported in this JVM */
public static void assumeCurrencySupport(String... codes) {
try {
// each JDK might have a diff list of supported currencies,
// these are the ones needed for this test to work.
for (String code : codes) {
Currency obj = Currency.getInstance(code);
assertNotNull(code, obj);
}
} catch (IllegalArgumentException e) {
Assume.assumeNoException(e);
}
}
  @BeforeClass
  public static void beforeClass() throws Exception {
    // Skip the whole suite on JVMs whose currency data lacks any of these codes.
    assumeCurrencySupport("USD", "EUR", "MXN", "GBP", "JPY", "NOK");
    initCore("solrconfig.xml", "schema.xml");
  }
  /**
   * Verifies the schema wiring of a currency field: it is a poly field and the schema declares
   * the dynamic sub-fields (currency code + raw amount) its suffixes imply.
   */
  @Test
  public void testCurrencySchema() {
    IndexSchema schema = h.getCore().getLatestSchema();
    SchemaField amount = schema.getField(fieldName);
    assertNotNull(amount);
    assertTrue(amount.isPolyField());
    CurrencyFieldType type = (CurrencyFieldType) amount.getType();
    // Dynamic-field patterns derived from the type's configured suffixes.
    String currencyDynamicField = "*" + type.fieldSuffixCurrency;
    String amountDynamicField = "*" + type.fieldSuffixAmountRaw;
    SchemaField[] dynFields = schema.getDynamicFieldPrototypes();
    boolean seenCurrency = false;
    boolean seenAmount = false;
    for (SchemaField dynField : dynFields) {
      if (dynField.getName().equals(amountDynamicField)) {
        seenAmount = true;
      }
      if (dynField.getName().equals(currencyDynamicField)) {
        seenCurrency = true;
      }
    }
    assertTrue(
        "Didn't find the expected currency code dynamic field " + currencyDynamicField,
        seenCurrency);
    assertTrue("Didn't find the expected value dynamic field " + amountDynamicField, seenAmount);
  }
  /**
   * Checks field creation (three indexed/stored sub-fields per value) and a few direct
   * operations on the file-based exchange rate provider.
   */
  @Test
  public void testCurrencyFieldType() {
    assumeTrue(
        "This test is only applicable to the XML file based exchange rate provider",
        expectedProviderClass.equals(FileExchangeRateProvider.class));
    SolrCore core = h.getCore();
    IndexSchema schema = core.getLatestSchema();
    SchemaField amount = schema.getField(fieldName);
    assertNotNull(amount);
    assertTrue(fieldName + " is not a poly field", amount.isPolyField());
    FieldType tmp = amount.getType();
    assertTrue(
        fieldName + " is not an instance of CurrencyFieldType", tmp instanceof CurrencyFieldType);
    String currencyValue = "1.50,EUR";
    List<IndexableField> fields = amount.createFields(currencyValue);
    assertEquals(fields.size(), 3);
    // First field is currency code, second is value, third is stored.
    for (int i = 0; i < 3; i++) {
      boolean hasValue =
          fields.get(i).readerValue() != null
              || fields.get(i).numericValue() != null
              || fields.get(i).stringValue() != null;
      assertTrue("Doesn't have a value: " + fields.get(i), hasValue);
    }
    // The stored (third) sub-field round-trips the external representation.
    assertEquals(schema.getFieldTypeByName("string").toExternal(fields.get(2)), "1.50,EUR");
    // A few tests on the provider directly
    ExchangeRateProvider p = ((CurrencyFieldType) tmp).getProvider();
    Set<String> availableCurrencies = p.listAvailableCurrencies();
    assertEquals(5, availableCurrencies.size());
    assertTrue(p.reload());
    // Test-config rate: USD -> EUR is 2.5 (asymmetric by design).
    assertEquals(2.5, p.getExchangeRate("USD", "EUR"), 0.00000000001);
  }
  /** Direct checks against the mock provider: currency list, reload, and a known rate. */
  @Test
  public void testMockExchangeRateProvider() {
    assumeTrue(
        "This test is only applicable to the mock exchange rate provider",
        expectedProviderClass.equals(MockExchangeRateProvider.class));
    SolrCore core = h.getCore();
    IndexSchema schema = core.getLatestSchema();
    SchemaField field = schema.getField(fieldName);
    FieldType fieldType = field.getType();
    ExchangeRateProvider provider = ((CurrencyFieldType) fieldType).getProvider();
    // A few tests on the provider directly
    assertEquals(3, provider.listAvailableCurrencies().size());
    assertTrue(provider.reload());
    assertEquals(0.8, provider.getExchangeRate("USD", "EUR"), 0.00000000001);
  }
  /**
   * Range queries over currency values, mixing docs with and without values, negative amounts,
   * cross-currency ranges (GBP = 1/2 USD, EUR = 2.5x USD in the test config), and open-ended
   * endpoints with and without explicit currency codes.
   */
  @Test
  public void testCurrencyRangeSearch() {
    assumeTrue(
        "This test is only applicable to the XML file based exchange rate provider",
        expectedProviderClass.equals(FileExchangeRateProvider.class));
    clearIndex();
    final int emptyDocs = atLeast(50); // times 2
    final int negDocs = atLeast(5);
    assertU(adoc("id", "0", fieldName, "0,USD")); // 0
    // lots of docs w/o values
    for (int i = 100; i <= 100 + emptyDocs; i++) {
      assertU(adoc("id", "" + i));
    }
    // docs with values in ranges we'll query
    for (int i = 1; i <= 10; i++) {
      assertU(adoc("id", "" + i, fieldName, i + ",USD"));
    }
    // more docs w/o values
    for (int i = 500; i <= 500 + emptyDocs; i++) {
      assertU(adoc("id", "" + i));
    }
    // some negative values
    for (int i = -100; i > -100 - negDocs; i--) {
      assertU(adoc("id", "" + i, fieldName, i + ",USD"));
    }
    assertU(adoc("id", "40", fieldName, "0,USD")); // 0
    assertU(commit());
    assertQ(req("fl", "*,score", "q", fieldName + ":[2.00,USD TO 5.00,USD]"), "//*[@numFound='4']");
    assertQ(req("fl", "*,score", "q", fieldName + ":[0.50,USD TO 1.00,USD]"), "//*[@numFound='1']");
    assertQ(
        req("fl", "*,score", "q", fieldName + ":[24.00,USD TO 25.00,USD]"), "//*[@numFound='0']");
    // "GBP" currency code is 1/2 of a USD dollar, for testing.
    assertQ(req("fl", "*,score", "q", fieldName + ":[0.50,GBP TO 1.00,GBP]"), "//*[@numFound='2']");
    // "EUR" currency code is 2.5X of a USD dollar, for testing.
    assertQ(
        req("fl", "*,score", "q", fieldName + ":[24.00,EUR TO 25.00,EUR]"), "//*[@numFound='1']");
    // Slight asymmetric rate should work.
    assertQ(
        req("fl", "*,score", "q", fieldName + ":[24.99,EUR TO 25.01,EUR]"), "//*[@numFound='1']");
    // Open-ended ranges without currency
    assertQ(
        req("fl", "*,score", "q", fieldName + ":[* TO *]"),
        "//*[@numFound='" + (2 + 10 + negDocs) + "']");
    // Open-ended ranges without currency
    assertQ(
        req("fl", "*,score", "q", fieldName + ":*"), "//*[@numFound='" + (2 + 10 + negDocs) + "']");
    // Open-ended ranges with currency
    assertQ(
        req("fl", "*,score", "q", fieldName + ":[*,EUR TO *,EUR]"),
        "//*[@numFound='" + (2 + 10 + negDocs) + "']");
    // Open-ended start range without currency
    assertQ(
        req("fl", "*,score", "q", fieldName + ":[* TO 5,USD]"),
        "//*[@numFound='" + (2 + 5 + negDocs) + "']");
    // Open-ended start range with currency (currency for the * won't matter)
    assertQ(
        req("fl", "*,score", "q", fieldName + ":[*,USD TO 5,USD]"),
        "//*[@numFound='" + (2 + 5 + negDocs) + "']");
    // Open-ended end range
    assertQ(req("fl", "*,score", "q", fieldName + ":[3 TO *]"), "//*[@numFound='8']");
  }
  /** An unknown currency code in a range query must fail with HTTP 400 (bad request). */
  @Test
  public void testBogusCurrency() {
    ignoreException("HOSS");
    // bogus currency
    assertQEx(
        "Expected exception for invalid currency",
        req("fl", "*,score", "q", fieldName + ":[3,HOSS TO *]"),
        400);
  }
  /**
   * Exact-value ("point") queries, including matching a MXN-indexed doc via its USD equivalent
   * (MXN = 1/2 USD in the test config) and confirming near-miss values do not match.
   */
  @Test
  public void testCurrencyPointQuery() {
    assumeTrue(
        "This test is only applicable to the XML file based exchange rate provider",
        expectedProviderClass.equals(FileExchangeRateProvider.class));
    clearIndex();
    assertU(adoc("id", "" + 1, fieldName, "10.00,USD"));
    assertU(adoc("id", "" + 2, fieldName, "15.00,MXN"));
    assertU(commit());
    assertQ(req("fl", "*,score", "q", fieldName + ":10.00,USD"), "//str[@name='id']='1'");
    assertQ(req("fl", "*,score", "q", fieldName + ":9.99,USD"), "//*[@numFound='0']");
    assertQ(req("fl", "*,score", "q", fieldName + ":10.01,USD"), "//*[@numFound='0']");
    assertQ(req("fl", "*,score", "q", fieldName + ":15.00,MXN"), "//str[@name='id']='2'");
    // 15.00,MXN == 7.50,USD in the test exchange rates.
    assertQ(req("fl", "*,score", "q", fieldName + ":7.50,USD"), "//str[@name='id']='2'");
    assertQ(req("fl", "*,score", "q", fieldName + ":7.49,USD"), "//*[@numFound='0']");
    assertQ(req("fl", "*,score", "q", fieldName + ":7.51,USD"), "//*[@numFound='0']");
  }
  /**
   * Manual benchmark (deliberately {@code @Ignore}d): indexes 200k docs then times 1000 range
   * queries per pass in USD and EUR, printing wall-clock times to stdout. Not a correctness
   * test; run by hand when profiling currency range queries.
   */
  @Ignore
  public void testPerformance() {
    clearIndex();
    Random r = random();
    int initDocs = 200000;
    for (int i = 1; i <= initDocs; i++) {
      assertU(adoc("id", "" + i, fieldName, (r.nextInt(10) + 1.00) + ",USD"));
      if (i % 1000 == 0) System.out.println(i);
    }
    assertU(commit());
    // Warm-up pass before the timed loops below.
    for (int i = 0; i < 1000; i++) {
      double lower = r.nextInt(10) + 1.00;
      assertQ(
          req(
              "fl",
              "*,score",
              "q",
              fieldName + ":[" + lower + ",USD TO " + (lower + 10.00) + ",USD]"),
          "//*");
      assertQ(
          req(
              "fl",
              "*,score",
              "q",
              fieldName + ":[" + lower + ",EUR TO " + (lower + 10.00) + ",EUR]"),
          "//*");
    }
    for (int j = 0; j < 3; j++) {
      final RTimer timer = new RTimer();
      for (int i = 0; i < 1000; i++) {
        double lower = r.nextInt(10) + 1.00;
        assertQ(
            req(
                "fl",
                "*,score",
                "q",
                fieldName + ":[" + lower + ",USD TO " + (lower + (9.99 - (j * 0.01))) + ",USD]"),
            "//*");
      }
      System.out.println(timer.getTime());
    }
    System.out.println("---");
    for (int j = 0; j < 3; j++) {
      final RTimer timer = new RTimer();
      for (int i = 0; i < 1000; i++) {
        double lower = r.nextInt(10) + 1.00;
        assertQ(
            req(
                "fl",
                "*,score",
                "q",
                fieldName + ":[" + lower + ",EUR TO " + (lower + (9.99 - (j * 0.01))) + ",EUR]"),
            "//*");
      }
      System.out.println(timer.getTime());
    }
  }
  /**
   * Sorting across mixed currencies: values are compared after conversion, so 6.00,GBP
   * (12.00 USD) sorts highest and 7.00,EUR (2.80 USD with the asymmetric rate) lowest.
   */
  @Test
  public void testCurrencySort() {
    assumeTrue(
        "This test is only applicable to the XML file based exchange rate provider",
        expectedProviderClass.equals(FileExchangeRateProvider.class));
    clearIndex();
    assertU(adoc("id", "" + 1, fieldName, "10.00,USD"));
    assertU(adoc("id", "" + 2, fieldName, "15.00,EUR"));
    assertU(adoc("id", "" + 3, fieldName, "7.00,EUR"));
    assertU(adoc("id", "" + 4, fieldName, "6.00,GBP"));
    assertU(adoc("id", "" + 5, fieldName, "2.00,GBP"));
    assertU(commit());
    assertQ(
        req("fl", "*,score", "q", "*:*", "sort", fieldName + " desc", "limit", "1"),
        "//str[@name='id']='4'");
    assertQ(
        req("fl", "*,score", "q", "*:*", "sort", fieldName + " asc", "limit", "1"),
        "//str[@name='id']='3'");
  }
public void testExpectedProvider() {
SolrCore core = h.getCore();
IndexSchema schema = core.getLatestSchema();
SchemaField field = schema.getField(fieldName);
FieldType fieldType = field.getType();
ExchangeRateProvider provider = ((CurrencyFieldType) fieldType).getProvider();
assertEquals(expectedProviderClass, provider.getClass());
}
public void testFunctionUsage() {
assumeTrue(
"This test is only applicable to the XML file based exchange rate provider",
expectedProviderClass.equals(FileExchangeRateProvider.class));
clearIndex();
for (int i = 1; i <= 8; i++) {
// "GBP" currency code is 1/2 of a USD dollar, for testing.
assertU(adoc("id", "" + i, fieldName, (((float) i) / 2) + ",GBP"));
}
for (int i = 9; i <= 11; i++) {
assertU(adoc("id", "" + i, fieldName, i + ",USD"));
}
assertU(commit());
// direct value source usage, gets "raw" form of default currency
// default==USD, so raw==pennies
assertQ(
req(
"fl", "id,func:field($f)",
"f", fieldName,
"q", "id:5"),
"//*[@numFound='1']",
"//doc/float[@name='func' and .=500]");
assertQ(
req(
"fl", "id,func:field($f)",
"f", fieldName,
"q", "id:10"),
"//*[@numFound='1']",
"//doc/float[@name='func' and .=1000]");
assertQ(
req(
"fl", "id,score," + fieldName,
"q", "{!frange u=500}" + fieldName),
"//*[@numFound='5']",
"//str[@name='id']='1'",
"//str[@name='id']='2'",
"//str[@name='id']='3'",
"//str[@name='id']='4'",
"//str[@name='id']='5'");
assertQ(
req(
"fl", "id,score," + fieldName,
"q", "{!frange l=500 u=1000}" + fieldName),
"//*[@numFound='6']",
"//str[@name='id']='5'",
"//str[@name='id']='6'",
"//str[@name='id']='7'",
"//str[@name='id']='8'",
"//str[@name='id']='9'",
"//str[@name='id']='10'");
// use the currency function to convert to default (USD)
assertQ(
req(
"fl", "id,func:currency($f)",
"f", fieldName,
"q", "id:10"),
"//*[@numFound='1']",
"//doc/float[@name='func' and .=10]");
assertQ(
req(
"fl", "id,func:currency($f)",
"f", fieldName,
"q", "id:5"),
"//*[@numFound='1']",
"//doc/float[@name='func' and .=5]");
assertQ(
req("fl", "id,score" + fieldName, "f", fieldName, "q", "{!frange u=5}currency($f)"),
"//*[@numFound='5']",
"//str[@name='id']='1'",
"//str[@name='id']='2'",
"//str[@name='id']='3'",
"//str[@name='id']='4'",
"//str[@name='id']='5'");
assertQ(
req("fl", "id,score" + fieldName, "f", fieldName, "q", "{!frange l=5 u=10}currency($f)"),
"//*[@numFound='6']",
"//str[@name='id']='5'",
"//str[@name='id']='6'",
"//str[@name='id']='7'",
"//str[@name='id']='8'",
"//str[@name='id']='9'",
"//str[@name='id']='10'");
// use the currency function to convert to MXN
assertQ(
req(
"fl", "id,func:currency($f,MXN)",
"f", fieldName,
"q", "id:5"),
"//*[@numFound='1']",
"//doc/float[@name='func' and .=10]");
assertQ(
req(
"fl", "id,func:currency($f,MXN)",
"f", fieldName,
"q", "id:10"),
"//*[@numFound='1']",
"//doc/float[@name='func' and .=20]");
assertQ(
req("fl", "*,score," + fieldName, "f", fieldName, "q", "{!frange u=10}currency($f,MXN)"),
"//*[@numFound='5']",
"//str[@name='id']='1'",
"//str[@name='id']='2'",
"//str[@name='id']='3'",
"//str[@name='id']='4'",
"//str[@name='id']='5'");
assertQ(
req(
"fl",
"*,score," + fieldName,
"f",
fieldName,
"q",
"{!frange l=10 u=20}currency($f,MXN)"),
"//*[@numFound='6']",
"//str[@name='id']='5'",
"//str[@name='id']='6'",
"//str[@name='id']='7'",
"//str[@name='id']='8'",
"//str[@name='id']='9'",
"//str[@name='id']='10'");
}
@Test
public void testStringValue() {
assertEquals("3.14,USD", new CurrencyValue(314, "USD").strValue());
assertEquals("-3.14,GBP", new CurrencyValue(-314, "GBP").strValue());
assertEquals("3.14,GBP", new CurrencyValue(314, "GBP").strValue());
CurrencyValue currencyValue = new CurrencyValue(314, "XYZ");
expectThrows(SolrException.class, currencyValue::strValue);
}
  /**
   * Range faceting over currency values, checked through both the classic {@code facet.range}
   * API and {@code json.facet}. Covers: USD (implicit and explicit), a zero start point with
   * mincount, the asymmetric EUR rate (which shifts docs into different buckets than USD/GBP),
   * a gap expressed in a third currency, and the error case of mismatched start/end currencies.
   */
  @Test
  public void testRangeFacet() {
    assumeTrue(
        "This test is only applicable to the XML file based exchange rate provider "
            + "because it exercises the asymmetric exchange rates option it supports",
        expectedProviderClass.equals(FileExchangeRateProvider.class));
    clearIndex();
    // NOTE: in our test conversions EUR uses an asymmetric exchange rate
    // these are the equivalent values when converting to:  USD  EUR  GBP
    assertU(adoc("id", "" + 1, fieldName, "10.00,USD")); // 10.00,USD 25.00,EUR 5.00,GBP
    assertU(adoc("id", "" + 2, fieldName, "15.00,EUR")); // 7.50,USD 15.00,EUR 7.50,GBP
    assertU(adoc("id", "" + 3, fieldName, "6.00,GBP")); // 12.00,USD 12.00,EUR 6.00,GBP
    assertU(adoc("id", "" + 4, fieldName, "7.00,EUR")); // 3.50,USD 7.00,EUR 3.50,GBP
    assertU(adoc("id", "" + 5, fieldName, "2,GBP")); // 4.00,USD 4.00,EUR 2.00,GBP
    assertU(commit());
    // Empty suffix exercises the implicit default currency (USD).
    for (String suffix : Arrays.asList("", ",USD")) {
      assertQ(
          "Ensure that we get correct facet counts back in USD (explicit or implicit default) (facet.range)",
          req(
              "fl",
              "*,score",
              "q",
              "*:*",
              "rows",
              "0",
              "facet",
              "true",
              "facet.range",
              fieldName,
              "f." + fieldName + ".facet.range.start",
              "4.00" + suffix,
              "f." + fieldName + ".facet.range.end",
              "11.00" + suffix,
              "f." + fieldName + ".facet.range.gap",
              "1.00" + suffix,
              "f." + fieldName + ".facet.range.other",
              "all"),
          "count(//lst[@name='counts']/int)=7",
          "//lst[@name='counts']/int[@name='4.00,USD']='1'",
          "//lst[@name='counts']/int[@name='5.00,USD']='0'",
          "//lst[@name='counts']/int[@name='6.00,USD']='0'",
          "//lst[@name='counts']/int[@name='7.00,USD']='1'",
          "//lst[@name='counts']/int[@name='8.00,USD']='0'",
          "//lst[@name='counts']/int[@name='9.00,USD']='0'",
          "//lst[@name='counts']/int[@name='10.00,USD']='1'",
          "//int[@name='after']='1'",
          "//int[@name='before']='1'",
          "//int[@name='between']='3'");
      assertQ(
          "Ensure that we get correct facet counts back in USD (explicit or implicit default) (json.facet)",
          req(
              "fl",
              "*,score",
              "q",
              "*:*",
              "rows",
              "0",
              "json.facet",
              "{ xxx : { type:range, field:"
                  + fieldName
                  + ", "
                  + " start:'4.00"
                  + suffix
                  + "', gap:'1.00"
                  + suffix
                  + "', end:'11.00"
                  + suffix
                  + "', other:all } }"),
          "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7",
          "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]",
          "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='5.00,USD']]",
          "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='6.00,USD']]",
          "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]",
          "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,USD']]",
          "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='9.00,USD']]",
          "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]",
          "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='1']",
          "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']",
          "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']");
    }
    assertQ(
        "Zero value as start range point + mincount (facet.range)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "facet",
            "true",
            "facet.mincount",
            "1",
            "facet.range",
            fieldName,
            "f." + fieldName + ".facet.range.start",
            "0,USD",
            "f." + fieldName + ".facet.range.end",
            "11.00,USD",
            "f." + fieldName + ".facet.range.gap",
            "1.00,USD",
            "f." + fieldName + ".facet.range.other",
            "all"),
        "count(//lst[@name='counts']/int)=4",
        "//lst[@name='counts']/int[@name='3.00,USD']='1'",
        "//lst[@name='counts']/int[@name='4.00,USD']='1'",
        "//lst[@name='counts']/int[@name='7.00,USD']='1'",
        "//lst[@name='counts']/int[@name='10.00,USD']='1'",
        "//int[@name='before']='0'",
        "//int[@name='after']='1'",
        "//int[@name='between']='4'");
    assertQ(
        "Zero value as start range point + mincount (json.facet)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "json.facet",
            "{ xxx : { type:range, mincount:1, field:"
                + fieldName
                + ", start:'0.00,USD', gap:'1.00,USD', end:'11.00,USD', other:all } }"),
        "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=4",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='3.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]",
        "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='0']",
        "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']",
        "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='4']");
    // NOTE: because of asymmetric EUR exchange rate, these buckets are diff from the similar
    // looking
    // USD based request above
    // This request converts the values in each doc into EUR to decide what range buck it's in.
    assertQ(
        "Ensure that we get correct facet counts back in EUR (facet.range)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "facet",
            "true",
            "facet.range",
            fieldName,
            "f." + fieldName + ".facet.range.start",
            "8.00,EUR",
            "f." + fieldName + ".facet.range.end",
            "22.00,EUR",
            "f." + fieldName + ".facet.range.gap",
            "2.00,EUR",
            "f." + fieldName + ".facet.range.other",
            "all"),
        "count(//lst[@name='counts']/int)=7",
        "//lst[@name='counts']/int[@name='8.00,EUR']='0'",
        "//lst[@name='counts']/int[@name='10.00,EUR']='0'",
        "//lst[@name='counts']/int[@name='12.00,EUR']='1'",
        "//lst[@name='counts']/int[@name='14.00,EUR']='1'",
        "//lst[@name='counts']/int[@name='16.00,EUR']='0'",
        "//lst[@name='counts']/int[@name='18.00,EUR']='0'",
        "//lst[@name='counts']/int[@name='20.00,EUR']='0'",
        "//int[@name='before']='2'",
        "//int[@name='after']='1'",
        "//int[@name='between']='2'");
    assertQ(
        "Ensure that we get correct facet counts back in EUR (json.facet)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "json.facet",
            "{ xxx : { type:range, field:"
                + fieldName
                + ", start:'8.00,EUR', gap:'2.00,EUR', end:'22.00,EUR', other:all } }"),
        "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,EUR']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='10.00,EUR']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='12.00,EUR']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='14.00,EUR']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='16.00,EUR']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='18.00,EUR']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='20.00,EUR']]",
        "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='2']",
        "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']",
        "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='2']");
    // GBP has a symmetric exchange rate with USD, so these counts are *similar* to the USD based
    // request above... but the asymmetric EUR/USD rate means that when computing counts relative to
    // GBP the EUR based docs wind up in diff buckets
    assertQ(
        "Ensure that we get correct facet counts back in GBP (facet.range)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "facet",
            "true",
            "facet.range",
            fieldName,
            "f." + fieldName + ".facet.range.start",
            "2.00,GBP",
            "f." + fieldName + ".facet.range.end",
            "5.50,GBP",
            "f." + fieldName + ".facet.range.gap",
            "0.50,GBP",
            "f." + fieldName + ".facet.range.other",
            "all"),
        "count(//lst[@name='counts']/int)=7",
        "//lst[@name='counts']/int[@name='2.00,GBP']='1'",
        "//lst[@name='counts']/int[@name='2.50,GBP']='0'",
        "//lst[@name='counts']/int[@name='3.00,GBP']='0'",
        "//lst[@name='counts']/int[@name='3.50,GBP']='1'",
        "//lst[@name='counts']/int[@name='4.00,GBP']='0'",
        "//lst[@name='counts']/int[@name='4.50,GBP']='0'",
        "//lst[@name='counts']/int[@name='5.00,GBP']='1'",
        "//int[@name='before']='0'",
        "//int[@name='after']='2'",
        "//int[@name='between']='3'");
    assertQ(
        "Ensure that we get correct facet counts back in GBP (json.facet)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "json.facet",
            "{ xxx : { type:range, field:"
                + fieldName
                + ", start:'2.00,GBP', gap:'0.50,GBP', end:'5.50,GBP', other:all } }"),
        "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='2.00,GBP']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='2.50,GBP']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='3.00,GBP']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='3.50,GBP']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='4.00,GBP']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='4.50,GBP']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='5.00,GBP']]",
        "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='0']",
        "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='2']",
        "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']");
    assertQ(
        "Ensure that we can set a gap in a currency other than the start and end currencies (facet.range)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "facet",
            "true",
            "facet.range",
            fieldName,
            "f." + fieldName + ".facet.range.start",
            "4.00,USD",
            "f." + fieldName + ".facet.range.end",
            "11.00,USD",
            "f." + fieldName + ".facet.range.gap",
            "0.50,GBP",
            "f." + fieldName + ".facet.range.other",
            "all"),
        "count(//lst[@name='counts']/int)=7",
        "//lst[@name='counts']/int[@name='4.00,USD']='1'",
        "//lst[@name='counts']/int[@name='5.00,USD']='0'",
        "//lst[@name='counts']/int[@name='6.00,USD']='0'",
        "//lst[@name='counts']/int[@name='7.00,USD']='1'",
        "//lst[@name='counts']/int[@name='8.00,USD']='0'",
        "//lst[@name='counts']/int[@name='9.00,USD']='0'",
        "//lst[@name='counts']/int[@name='10.00,USD']='1'",
        "//int[@name='before']='1'",
        "//int[@name='after']='1'",
        "//int[@name='between']='3'");
    assertQ(
        "Ensure that we can set a gap in a currency other than the start and end currencies (json.facet)",
        req(
            "fl",
            "*,score",
            "q",
            "*:*",
            "rows",
            "0",
            "json.facet",
            "{ xxx : { type:range, field:"
                + fieldName
                + ", start:'4.00,USD', gap:'0.50,GBP', end:'11.00,USD', other:all } }"),
        "count(//lst[@name='xxx']/arr[@name='buckets']/lst)=7",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='4.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='5.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='6.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='7.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='8.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='0']][str[@name='val'][.='9.00,USD']]",
        "//lst[@name='xxx']/arr[@name='buckets']/lst[long[@name='count'][.='1']][str[@name='val'][.='10.00,USD']]",
        "//lst[@name='xxx']/lst[@name='before' ]/long[@name='count'][.='1']",
        "//lst[@name='xxx']/lst[@name='after' ]/long[@name='count'][.='1']",
        "//lst[@name='xxx']/lst[@name='between']/long[@name='count'][.='3']");
    // Mismatched start/end currencies must be rejected, via both facet APIs.
    for (SolrParams facet :
        Arrays.asList(
            params(
                "facet",
                "true",
                "facet.range",
                fieldName,
                "f." + fieldName + ".facet.range.start",
                "4.00,USD",
                "f." + fieldName + ".facet.range.end",
                "11.00,EUR",
                "f." + fieldName + ".facet.range.gap",
                "1.00,USD",
                "f." + fieldName + ".facet.range.other",
                "all"),
            params(
                "json.facet",
                "{ xxx : { type:range, field:"
                    + fieldName
                    + ", start:'4.00,USD', "
                    + " gap:'1.00,USD', end:'11.00,EUR', other:all } }"))) {
      assertQEx(
          "Ensure that we throw an error if we try to use different start and end currencies",
          "Cannot compare CurrencyValues when their currencies are not equal",
          req(facet, "q", "*:*"),
          SolrException.ErrorCode.BAD_REQUEST);
    }
  }
  /** Point queries against the mock provider's fixed USD/EUR/NOK rates. */
  @Test
  public void testMockFieldType() {
    assumeTrue(
        "This test is only applicable to the mock exchange rate provider",
        expectedProviderClass.equals(MockExchangeRateProvider.class));
    clearIndex();
    assertU(adoc("id", "1", fieldName, "1.00,USD"));
    assertU(adoc("id", "2", fieldName, "1.00,EUR"));
    assertU(adoc("id", "3", fieldName, "1.00,NOK"));
    assertU(commit());
    assertQ(
        req("fl", "*,score", "q", fieldName + ":5.0,NOK"),
        "//*[@numFound='1']",
        "//str[@name='id']='1'");
    assertQ(
        req("fl", "*,score", "q", fieldName + ":1.2,USD"),
        "//*[@numFound='1']",
        "//str[@name='id']='2'");
    assertQ(
        req("fl", "*,score", "q", fieldName + ":0.2,USD"),
        "//*[@numFound='1']",
        "//str[@name='id']='3'");
    assertQ(req("fl", "*,score", "q", fieldName + ":99,USD"), "//*[@numFound='0']");
  }
  /**
   * Point queries across the asymmetric EUR rate: a doc indexed as 15.00,EUR matches exactly
   * 7.50,USD and nothing a cent either side of it.
   */
  @Test
  public void testAsymmetricPointQuery() {
    assumeTrue(
        "This test is only applicable to the XML file based exchange rate provider",
        expectedProviderClass.equals(FileExchangeRateProvider.class));
    clearIndex();
    assertU(adoc("id", "" + 1, fieldName, "10.00,USD"));
    assertU(adoc("id", "" + 2, fieldName, "15.00,EUR"));
    assertU(commit());
    assertQ(req("fl", "*,score", "q", fieldName + ":15.00,EUR"), "//str[@name='id']='2'");
    assertQ(req("fl", "*,score", "q", fieldName + ":7.50,USD"), "//str[@name='id']='2'");
    assertQ(req("fl", "*,score", "q", fieldName + ":7.49,USD"), "//*[@numFound='0']");
    assertQ(req("fl", "*,score", "q", fieldName + ":7.51,USD"), "//*[@numFound='0']");
  }
}
|
googleapis/google-cloud-java | 36,198 | java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/CreateInstanceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/provisioning_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
 *
 *
 * <pre>
 * Request message for CreateInstance.
 * </pre>
 *
 * Protobuf type {@code google.cloud.apigeeregistry.v1.CreateInstanceRequest}
 */
public final class CreateInstanceRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.CreateInstanceRequest)
    CreateInstanceRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateInstanceRequest.newBuilder() to construct.
  private CreateInstanceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-constructed instances initialize string fields to "" so accessors never return null.
  private CreateInstanceRequest() {
    parent_ = "";
    instanceId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateInstanceRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.apigeeregistry.v1.ProvisioningServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_CreateInstanceRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.apigeeregistry.v1.ProvisioningServiceProto
        .internal_static_google_cloud_apigeeregistry_v1_CreateInstanceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.apigeeregistry.v1.CreateInstanceRequest.class,
            com.google.cloud.apigeeregistry.v1.CreateInstanceRequest.Builder.class);
  }

  // Presence bits: bit 0 (0x00000001) records whether the optional message field `instance` is set.
  private int bitField0_;

  public static final int PARENT_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. Parent resource of the Instance, of the form: `projects/&#42;/locations/&#42;`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field is stored as a ByteString after parsing; decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Parent resource of the Instance, of the form: `projects/&#42;/locations/&#42;`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Mirror of getParent(): encode once and cache the ByteString representation.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int INSTANCE_ID_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object instanceId_ = "";

  /**
   *
   *
   * <pre>
   * Required. Identifier to assign to the Instance. Must be unique within scope of the
   * parent resource.
   * </pre>
   *
   * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The instanceId.
   */
  @java.lang.Override
  public java.lang.String getInstanceId() {
    java.lang.Object ref = instanceId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      instanceId_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Identifier to assign to the Instance. Must be unique within scope of the
   * parent resource.
   * </pre>
   *
   * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for instanceId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getInstanceIdBytes() {
    java.lang.Object ref = instanceId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      instanceId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int INSTANCE_FIELD_NUMBER = 3;
  private com.google.cloud.apigeeregistry.v1.Instance instance_;

  /**
   *
   *
   * <pre>
   * Required. The Instance.
   * </pre>
   *
   * <code>
   * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the instance field is set.
   */
  @java.lang.Override
  public boolean hasInstance() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The Instance.
   * </pre>
   *
   * <code>
   * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The instance.
   */
  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.Instance getInstance() {
    // Never returns null: falls back to the default Instance when the field is unset.
    return instance_ == null
        ? com.google.cloud.apigeeregistry.v1.Instance.getDefaultInstance()
        : instance_;
  }

  /**
   *
   *
   * <pre>
   * Required. The Instance.
   * </pre>
   *
   * <code>
   * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.InstanceOrBuilder getInstanceOrBuilder() {
    return instance_ == null
        ? com.google.cloud.apigeeregistry.v1.Instance.getDefaultInstance()
        : instance_;
  }

  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are written in ascending field-number order; default values are skipped on the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getInstance());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getInstance());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.apigeeregistry.v1.CreateInstanceRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.apigeeregistry.v1.CreateInstanceRequest other =
        (com.google.cloud.apigeeregistry.v1.CreateInstanceRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (!getInstanceId().equals(other.getInstanceId())) return false;
    // Message-typed field: presence must match, and values are compared only when both present.
    if (hasInstance() != other.hasInstance()) return false;
    if (hasInstance()) {
      if (!getInstance().equals(other.getInstance())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode caches the hash; 0 means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getInstanceId().hashCode();
    if (hasInstance()) {
      hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
      hash = (53 * hash) + getInstance().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.apigeeregistry.v1.CreateInstanceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request message for CreateInstance.
   * </pre>
   *
   * Protobuf type {@code google.cloud.apigeeregistry.v1.CreateInstanceRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.CreateInstanceRequest)
      com.google.cloud.apigeeregistry.v1.CreateInstanceRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.apigeeregistry.v1.ProvisioningServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_CreateInstanceRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.apigeeregistry.v1.ProvisioningServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_CreateInstanceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.apigeeregistry.v1.CreateInstanceRequest.class,
              com.google.cloud.apigeeregistry.v1.CreateInstanceRequest.Builder.class);
    }

    // Construct using com.google.cloud.apigeeregistry.v1.CreateInstanceRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getInstanceFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      instanceId_ = "";
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.apigeeregistry.v1.ProvisioningServiceProto
          .internal_static_google_cloud_apigeeregistry_v1_CreateInstanceRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.CreateInstanceRequest getDefaultInstanceForType() {
      return com.google.cloud.apigeeregistry.v1.CreateInstanceRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.CreateInstanceRequest build() {
      com.google.cloud.apigeeregistry.v1.CreateInstanceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.apigeeregistry.v1.CreateInstanceRequest buildPartial() {
      com.google.cloud.apigeeregistry.v1.CreateInstanceRequest result =
          new com.google.cloud.apigeeregistry.v1.CreateInstanceRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose builder-side presence bits are set into `result`.
    // Note: builder bits (parent=0x1, instanceId=0x2, instance=0x4) are remapped to the
    // message's own bit layout, where `instance` occupies bit 0.
    private void buildPartial0(com.google.cloud.apigeeregistry.v1.CreateInstanceRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.instanceId_ = instanceId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.apigeeregistry.v1.CreateInstanceRequest) {
        return mergeFrom((com.google.cloud.apigeeregistry.v1.CreateInstanceRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.CreateInstanceRequest other) {
      if (other == com.google.cloud.apigeeregistry.v1.CreateInstanceRequest.getDefaultInstance())
        return this;
      // Standard proto3 merge semantics: non-empty scalar fields overwrite, set message
      // fields are merged recursively.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getInstanceId().isEmpty()) {
        instanceId_ = other.instanceId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasInstance()) {
        mergeInstance(other.getInstance());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Tag values = (field_number << 3) | wire_type; 10/18/26 are the
          // length-delimited tags for fields 1, 2 and 3 respectively.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                instanceId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. Parent resource of the Instance, of the form: `projects/&#42;/locations/&#42;`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Parent resource of the Instance, of the form: `projects/&#42;/locations/&#42;`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Parent resource of the Instance, of the form: `projects/&#42;/locations/&#42;`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Parent resource of the Instance, of the form: `projects/&#42;/locations/&#42;`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Parent resource of the Instance, of the form: `projects/&#42;/locations/&#42;`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object instanceId_ = "";

    /**
     *
     *
     * <pre>
     * Required. Identifier to assign to the Instance. Must be unique within scope of the
     * parent resource.
     * </pre>
     *
     * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The instanceId.
     */
    public java.lang.String getInstanceId() {
      java.lang.Object ref = instanceId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        instanceId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Identifier to assign to the Instance. Must be unique within scope of the
     * parent resource.
     * </pre>
     *
     * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for instanceId.
     */
    public com.google.protobuf.ByteString getInstanceIdBytes() {
      java.lang.Object ref = instanceId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        instanceId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Identifier to assign to the Instance. Must be unique within scope of the
     * parent resource.
     * </pre>
     *
     * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The instanceId to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      instanceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Identifier to assign to the Instance. Must be unique within scope of the
     * parent resource.
     * </pre>
     *
     * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInstanceId() {
      instanceId_ = getDefaultInstance().getInstanceId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Identifier to assign to the Instance. Must be unique within scope of the
     * parent resource.
     * </pre>
     *
     * <code>string instance_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for instanceId to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      instanceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private com.google.cloud.apigeeregistry.v1.Instance instance_;
    // Lazily-created nested builder for `instance`; once created it owns the field's state.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.apigeeregistry.v1.Instance,
            com.google.cloud.apigeeregistry.v1.Instance.Builder,
            com.google.cloud.apigeeregistry.v1.InstanceOrBuilder>
        instanceBuilder_;

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the instance field is set.
     */
    public boolean hasInstance() {
      return ((bitField0_ & 0x00000004) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The instance.
     */
    public com.google.cloud.apigeeregistry.v1.Instance getInstance() {
      if (instanceBuilder_ == null) {
        return instance_ == null
            ? com.google.cloud.apigeeregistry.v1.Instance.getDefaultInstance()
            : instance_;
      } else {
        return instanceBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(com.google.cloud.apigeeregistry.v1.Instance value) {
      if (instanceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        instance_ = value;
      } else {
        instanceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(
        com.google.cloud.apigeeregistry.v1.Instance.Builder builderForValue) {
      if (instanceBuilder_ == null) {
        instance_ = builderForValue.build();
      } else {
        instanceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeInstance(com.google.cloud.apigeeregistry.v1.Instance value) {
      if (instanceBuilder_ == null) {
        // Merge into the existing value only when one is already present and non-default;
        // otherwise simply adopt `value`.
        if (((bitField0_ & 0x00000004) != 0)
            && instance_ != null
            && instance_ != com.google.cloud.apigeeregistry.v1.Instance.getDefaultInstance()) {
          getInstanceBuilder().mergeFrom(value);
        } else {
          instance_ = value;
        }
      } else {
        instanceBuilder_.mergeFrom(value);
      }
      if (instance_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearInstance() {
      bitField0_ = (bitField0_ & ~0x00000004);
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.apigeeregistry.v1.Instance.Builder getInstanceBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getInstanceFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.apigeeregistry.v1.InstanceOrBuilder getInstanceOrBuilder() {
      if (instanceBuilder_ != null) {
        return instanceBuilder_.getMessageOrBuilder();
      } else {
        return instance_ == null
            ? com.google.cloud.apigeeregistry.v1.Instance.getDefaultInstance()
            : instance_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The Instance.
     * </pre>
     *
     * <code>
     * .google.cloud.apigeeregistry.v1.Instance instance = 3 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.apigeeregistry.v1.Instance,
            com.google.cloud.apigeeregistry.v1.Instance.Builder,
            com.google.cloud.apigeeregistry.v1.InstanceOrBuilder>
        getInstanceFieldBuilder() {
      if (instanceBuilder_ == null) {
        // After the builder is created, instance_ is nulled out: the builder becomes the
        // single source of truth for the field.
        instanceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.apigeeregistry.v1.Instance,
                com.google.cloud.apigeeregistry.v1.Instance.Builder,
                com.google.cloud.apigeeregistry.v1.InstanceOrBuilder>(
                getInstance(), getParentForChildren(), isClean());
        instance_ = null;
      }
      return instanceBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.CreateInstanceRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.CreateInstanceRequest)
  // Shared immutable singleton returned by getDefaultInstance(); also the prototype for builders.
  private static final com.google.cloud.apigeeregistry.v1.CreateInstanceRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.CreateInstanceRequest();
  }

  public static com.google.cloud.apigeeregistry.v1.CreateInstanceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<CreateInstanceRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateInstanceRequest>() {
        @java.lang.Override
        public CreateInstanceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CreateInstanceRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CreateInstanceRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.apigeeregistry.v1.CreateInstanceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,494 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/InterconnectAttachmentGroupsStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.InterconnectAttachmentGroupsClient.ListPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.httpjson.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.DeleteInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.GetIamPolicyInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.GetInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.GetOperationalStatusInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.InsertInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.InterconnectAttachmentGroup;
import com.google.cloud.compute.v1.InterconnectAttachmentGroupsGetOperationalStatusResponse;
import com.google.cloud.compute.v1.InterconnectAttachmentGroupsListResponse;
import com.google.cloud.compute.v1.ListInterconnectAttachmentGroupsRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.PatchInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.Policy;
import com.google.cloud.compute.v1.SetIamPolicyInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.TestIamPermissionsInterconnectAttachmentGroupRequest;
import com.google.cloud.compute.v1.TestPermissionsResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link InterconnectAttachmentGroupsStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (compute.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of get:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* InterconnectAttachmentGroupsStubSettings.Builder interconnectAttachmentGroupsSettingsBuilder =
* InterconnectAttachmentGroupsStubSettings.newBuilder();
* interconnectAttachmentGroupsSettingsBuilder
* .getSettings()
* .setRetrySettings(
* interconnectAttachmentGroupsSettingsBuilder
* .getSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* InterconnectAttachmentGroupsStubSettings interconnectAttachmentGroupsSettings =
* interconnectAttachmentGroupsSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for delete:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* InterconnectAttachmentGroupsStubSettings.Builder interconnectAttachmentGroupsSettingsBuilder =
* InterconnectAttachmentGroupsStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
 * interconnectAttachmentGroupsSettingsBuilder
 *     .deleteOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class InterconnectAttachmentGroupsStubSettings
    extends StubSettings<InterconnectAttachmentGroupsStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/compute")
          .add("https://www.googleapis.com/auth/cloud-platform")
          .build();

  // Immutable per-RPC call settings, built once from the Builder in the constructor below.
  private final UnaryCallSettings<DeleteInterconnectAttachmentGroupRequest, Operation>
      deleteSettings;
  private final OperationCallSettings<
          DeleteInterconnectAttachmentGroupRequest, Operation, Operation>
      deleteOperationSettings;
  private final UnaryCallSettings<
          GetInterconnectAttachmentGroupRequest, InterconnectAttachmentGroup>
      getSettings;
  private final UnaryCallSettings<GetIamPolicyInterconnectAttachmentGroupRequest, Policy>
      getIamPolicySettings;
  private final UnaryCallSettings<
          GetOperationalStatusInterconnectAttachmentGroupRequest,
          InterconnectAttachmentGroupsGetOperationalStatusResponse>
      getOperationalStatusSettings;
  private final UnaryCallSettings<InsertInterconnectAttachmentGroupRequest, Operation>
      insertSettings;
  private final OperationCallSettings<
          InsertInterconnectAttachmentGroupRequest, Operation, Operation>
      insertOperationSettings;
  private final PagedCallSettings<
          ListInterconnectAttachmentGroupsRequest,
          InterconnectAttachmentGroupsListResponse,
          ListPagedResponse>
      listSettings;
  private final UnaryCallSettings<PatchInterconnectAttachmentGroupRequest, Operation> patchSettings;
  private final OperationCallSettings<PatchInterconnectAttachmentGroupRequest, Operation, Operation>
      patchOperationSettings;
  private final UnaryCallSettings<SetIamPolicyInterconnectAttachmentGroupRequest, Policy>
      setIamPolicySettings;
  private final UnaryCallSettings<
          TestIamPermissionsInterconnectAttachmentGroupRequest, TestPermissionsResponse>
      testIamPermissionsSettings;

  /**
   * Pagination descriptor for the list RPC: how to inject page tokens/sizes into requests and how
   * to extract the next token and the resources from responses.
   */
  private static final PagedListDescriptor<
          ListInterconnectAttachmentGroupsRequest,
          InterconnectAttachmentGroupsListResponse,
          InterconnectAttachmentGroup>
      LIST_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListInterconnectAttachmentGroupsRequest,
              InterconnectAttachmentGroupsListResponse,
              InterconnectAttachmentGroup>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListInterconnectAttachmentGroupsRequest injectToken(
                ListInterconnectAttachmentGroupsRequest payload, String token) {
              return ListInterconnectAttachmentGroupsRequest.newBuilder(payload)
                  .setPageToken(token)
                  .build();
            }

            @Override
            public ListInterconnectAttachmentGroupsRequest injectPageSize(
                ListInterconnectAttachmentGroupsRequest payload, int pageSize) {
              return ListInterconnectAttachmentGroupsRequest.newBuilder(payload)
                  .setMaxResults(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(ListInterconnectAttachmentGroupsRequest payload) {
              return payload.getMaxResults();
            }

            @Override
            public String extractNextToken(InterconnectAttachmentGroupsListResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<InterconnectAttachmentGroup> extractResources(
                InterconnectAttachmentGroupsListResponse payload) {
              return payload.getItemsList();
            }
          };

  /**
   * Factory that assembles {@link ListPagedResponse} futures from raw list responses, using
   * {@code LIST_PAGE_STR_DESC} to drive the page iteration.
   */
  private static final PagedListResponseFactory<
          ListInterconnectAttachmentGroupsRequest,
          InterconnectAttachmentGroupsListResponse,
          ListPagedResponse>
      LIST_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListInterconnectAttachmentGroupsRequest,
              InterconnectAttachmentGroupsListResponse,
              ListPagedResponse>() {
            @Override
            public ApiFuture<ListPagedResponse> getFuturePagedResponse(
                UnaryCallable<
                        ListInterconnectAttachmentGroupsRequest,
                        InterconnectAttachmentGroupsListResponse>
                    callable,
                ListInterconnectAttachmentGroupsRequest request,
                ApiCallContext context,
                ApiFuture<InterconnectAttachmentGroupsListResponse> futureResponse) {
              PageContext<
                      ListInterconnectAttachmentGroupsRequest,
                      InterconnectAttachmentGroupsListResponse,
                      InterconnectAttachmentGroup>
                  pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context);
              return ListPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to delete. */
  public UnaryCallSettings<DeleteInterconnectAttachmentGroupRequest, Operation> deleteSettings() {
    return deleteSettings;
  }

  /** Returns the object with the settings used for calls to delete. */
  public OperationCallSettings<DeleteInterconnectAttachmentGroupRequest, Operation, Operation>
      deleteOperationSettings() {
    return deleteOperationSettings;
  }

  /** Returns the object with the settings used for calls to get. */
  public UnaryCallSettings<GetInterconnectAttachmentGroupRequest, InterconnectAttachmentGroup>
      getSettings() {
    return getSettings;
  }

  /** Returns the object with the settings used for calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyInterconnectAttachmentGroupRequest, Policy>
      getIamPolicySettings() {
    return getIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to getOperationalStatus. */
  public UnaryCallSettings<
          GetOperationalStatusInterconnectAttachmentGroupRequest,
          InterconnectAttachmentGroupsGetOperationalStatusResponse>
      getOperationalStatusSettings() {
    return getOperationalStatusSettings;
  }

  /** Returns the object with the settings used for calls to insert. */
  public UnaryCallSettings<InsertInterconnectAttachmentGroupRequest, Operation> insertSettings() {
    return insertSettings;
  }

  /** Returns the object with the settings used for calls to insert. */
  public OperationCallSettings<InsertInterconnectAttachmentGroupRequest, Operation, Operation>
      insertOperationSettings() {
    return insertOperationSettings;
  }

  /** Returns the object with the settings used for calls to list. */
  public PagedCallSettings<
          ListInterconnectAttachmentGroupsRequest,
          InterconnectAttachmentGroupsListResponse,
          ListPagedResponse>
      listSettings() {
    return listSettings;
  }

  /** Returns the object with the settings used for calls to patch. */
  public UnaryCallSettings<PatchInterconnectAttachmentGroupRequest, Operation> patchSettings() {
    return patchSettings;
  }

  /** Returns the object with the settings used for calls to patch. */
  public OperationCallSettings<PatchInterconnectAttachmentGroupRequest, Operation, Operation>
      patchOperationSettings() {
    return patchOperationSettings;
  }

  /** Returns the object with the settings used for calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyInterconnectAttachmentGroupRequest, Policy>
      setIamPolicySettings() {
    return setIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to testIamPermissions. */
  public UnaryCallSettings<
          TestIamPermissionsInterconnectAttachmentGroupRequest, TestPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }

  /**
   * Creates a transport-specific stub for these settings. Only the HTTP/JSON transport is
   * supported by this service; any other configured transport raises
   * {@link UnsupportedOperationException}.
   */
  public InterconnectAttachmentGroupsStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonInterconnectAttachmentGroupsStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "compute";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "compute.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "compute.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default HTTP/JSON ChannelProvider for this service. */
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default TransportChannelProvider (HTTP/JSON) for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultHttpJsonTransportProviderBuilder().build();
  }

  /** Returns a builder for the default ApiClientHeaderProvider for this service. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic",
            GaxProperties.getLibraryVersion(InterconnectAttachmentGroupsStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Builds the immutable settings by snapshotting each per-method builder. */
  protected InterconnectAttachmentGroupsStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    deleteSettings = settingsBuilder.deleteSettings().build();
    deleteOperationSettings = settingsBuilder.deleteOperationSettings().build();
    getSettings = settingsBuilder.getSettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    getOperationalStatusSettings = settingsBuilder.getOperationalStatusSettings().build();
    insertSettings = settingsBuilder.insertSettings().build();
    insertOperationSettings = settingsBuilder.insertOperationSettings().build();
    listSettings = settingsBuilder.listSettings().build();
    patchSettings = settingsBuilder.patchSettings().build();
    patchOperationSettings = settingsBuilder.patchOperationSettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }

  /** Builder for InterconnectAttachmentGroupsStubSettings. */
  public static class Builder
      extends StubSettings.Builder<InterconnectAttachmentGroupsStubSettings, Builder> {
    // All unary-method builders, collected so applyToAllUnaryMethods can update them in one pass.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<DeleteInterconnectAttachmentGroupRequest, Operation>
        deleteSettings;
    private final OperationCallSettings.Builder<
            DeleteInterconnectAttachmentGroupRequest, Operation, Operation>
        deleteOperationSettings;
    private final UnaryCallSettings.Builder<
            GetInterconnectAttachmentGroupRequest, InterconnectAttachmentGroup>
        getSettings;
    private final UnaryCallSettings.Builder<GetIamPolicyInterconnectAttachmentGroupRequest, Policy>
        getIamPolicySettings;
    private final UnaryCallSettings.Builder<
            GetOperationalStatusInterconnectAttachmentGroupRequest,
            InterconnectAttachmentGroupsGetOperationalStatusResponse>
        getOperationalStatusSettings;
    private final UnaryCallSettings.Builder<InsertInterconnectAttachmentGroupRequest, Operation>
        insertSettings;
    private final OperationCallSettings.Builder<
            InsertInterconnectAttachmentGroupRequest, Operation, Operation>
        insertOperationSettings;
    private final PagedCallSettings.Builder<
            ListInterconnectAttachmentGroupsRequest,
            InterconnectAttachmentGroupsListResponse,
            ListPagedResponse>
        listSettings;
    private final UnaryCallSettings.Builder<PatchInterconnectAttachmentGroupRequest, Operation>
        patchSettings;
    private final OperationCallSettings.Builder<
            PatchInterconnectAttachmentGroupRequest, Operation, Operation>
        patchOperationSettings;
    private final UnaryCallSettings.Builder<SetIamPolicyInterconnectAttachmentGroupRequest, Policy>
        setIamPolicySettings;
    private final UnaryCallSettings.Builder<
            TestIamPermissionsInterconnectAttachmentGroupRequest, TestPermissionsResponse>
        testIamPermissionsSettings;

    // Retryable status codes keyed by retry-policy name from the service config:
    // "no_retry_1_codes" retries nothing; "retry_policy_0_codes" retries
    // DEADLINE_EXCEEDED and UNAVAILABLE.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Retry timing parameters keyed by the same retry-policy names as above.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setTotalTimeoutDuration(Duration.ofMillis(600000L))
              .build();
      definitions.put("no_retry_1_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setTotalTimeoutDuration(Duration.ofMillis(600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    /** Creates per-method builders with defaults applied via initDefaults. */
    protected Builder(ClientContext clientContext) {
      super(clientContext);
      deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteOperationSettings = OperationCallSettings.newBuilder();
      getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getOperationalStatusSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertOperationSettings = OperationCallSettings.newBuilder();
      listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT);
      patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      patchOperationSettings = OperationCallSettings.newBuilder();
      setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              getOperationalStatusSettings,
              insertSettings,
              listSettings,
              patchSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }

    /** Creates a builder seeded from an existing settings instance (used by toBuilder()). */
    protected Builder(InterconnectAttachmentGroupsStubSettings settings) {
      super(settings);
      deleteSettings = settings.deleteSettings.toBuilder();
      deleteOperationSettings = settings.deleteOperationSettings.toBuilder();
      getSettings = settings.getSettings.toBuilder();
      getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
      getOperationalStatusSettings = settings.getOperationalStatusSettings.toBuilder();
      insertSettings = settings.insertSettings.toBuilder();
      insertOperationSettings = settings.insertOperationSettings.toBuilder();
      listSettings = settings.listSettings.toBuilder();
      patchSettings = settings.patchSettings.toBuilder();
      patchOperationSettings = settings.patchOperationSettings.toBuilder();
      setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
      testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              getOperationalStatusSettings,
              insertSettings,
              listSettings,
              patchSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
    }

    // Wires the default transport, credentials, headers and mTLS endpoint, then applies
    // per-method retry defaults.
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    // Applies the service-configured retryable codes / retry params to each unary method and
    // configures the long-running-operation polling algorithm for delete/insert/patch.
    private static Builder initDefaults(Builder builder) {
      builder
          .deleteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .getSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .getIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .getOperationalStatusSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .insertSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .listSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .patchSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .setIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .testIamPermissionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .deleteOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<DeleteInterconnectAttachmentGroupRequest, OperationSnapshot>
                      newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));
      builder
          .insertOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<InsertInterconnectAttachmentGroupRequest, OperationSnapshot>
                      newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));
      builder
          .patchOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<PatchInterconnectAttachmentGroupRequest, OperationSnapshot>
                      newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));
      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the list of builders for every unary method (mutable views, not copies). */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to delete. */
    public UnaryCallSettings.Builder<DeleteInterconnectAttachmentGroupRequest, Operation>
        deleteSettings() {
      return deleteSettings;
    }

    /** Returns the builder for the settings used for calls to delete. */
    public OperationCallSettings.Builder<
            DeleteInterconnectAttachmentGroupRequest, Operation, Operation>
        deleteOperationSettings() {
      return deleteOperationSettings;
    }

    /** Returns the builder for the settings used for calls to get. */
    public UnaryCallSettings.Builder<
            GetInterconnectAttachmentGroupRequest, InterconnectAttachmentGroup>
        getSettings() {
      return getSettings;
    }

    /** Returns the builder for the settings used for calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyInterconnectAttachmentGroupRequest, Policy>
        getIamPolicySettings() {
      return getIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to getOperationalStatus. */
    public UnaryCallSettings.Builder<
            GetOperationalStatusInterconnectAttachmentGroupRequest,
            InterconnectAttachmentGroupsGetOperationalStatusResponse>
        getOperationalStatusSettings() {
      return getOperationalStatusSettings;
    }

    /** Returns the builder for the settings used for calls to insert. */
    public UnaryCallSettings.Builder<InsertInterconnectAttachmentGroupRequest, Operation>
        insertSettings() {
      return insertSettings;
    }

    /** Returns the builder for the settings used for calls to insert. */
    public OperationCallSettings.Builder<
            InsertInterconnectAttachmentGroupRequest, Operation, Operation>
        insertOperationSettings() {
      return insertOperationSettings;
    }

    /** Returns the builder for the settings used for calls to list. */
    public PagedCallSettings.Builder<
            ListInterconnectAttachmentGroupsRequest,
            InterconnectAttachmentGroupsListResponse,
            ListPagedResponse>
        listSettings() {
      return listSettings;
    }

    /** Returns the builder for the settings used for calls to patch. */
    public UnaryCallSettings.Builder<PatchInterconnectAttachmentGroupRequest, Operation>
        patchSettings() {
      return patchSettings;
    }

    /** Returns the builder for the settings used for calls to patch. */
    public OperationCallSettings.Builder<
            PatchInterconnectAttachmentGroupRequest, Operation, Operation>
        patchOperationSettings() {
      return patchOperationSettings;
    }

    /** Returns the builder for the settings used for calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyInterconnectAttachmentGroupRequest, Policy>
        setIamPolicySettings() {
      return setIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to testIamPermissions. */
    public UnaryCallSettings.Builder<
            TestIamPermissionsInterconnectAttachmentGroupRequest, TestPermissionsResponse>
        testIamPermissionsSettings() {
      return testIamPermissionsSettings;
    }

    @Override
    public InterconnectAttachmentGroupsStubSettings build() throws IOException {
      return new InterconnectAttachmentGroupsStubSettings(this);
    }
  }
}
|
apache/ignite | 36,257 | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/snapshot/incremental/IncrementalSnapshotRestoreTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.persistence.snapshot.incremental;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import javax.cache.Cache;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.binary.BinaryType;
import org.apache.ignite.cache.ReadRepairStrategy;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.cluster.ClusterState;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.events.CacheConsistencyViolationEvent;
import org.apache.ignite.events.Event;
import org.apache.ignite.failure.StopNodeFailureHandler;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.TestRecordingCommunicationSpi;
import org.apache.ignite.internal.binary.BinaryContext;
import org.apache.ignite.internal.management.consistency.ConsistencyRepairCommandArg;
import org.apache.ignite.internal.management.consistency.ConsistencyRepairTask;
import org.apache.ignite.internal.management.consistency.ConsistencyTaskResult;
import org.apache.ignite.internal.pagemem.wal.WALIterator;
import org.apache.ignite.internal.pagemem.wal.record.IncrementalSnapshotFinishRecord;
import org.apache.ignite.internal.pagemem.wal.record.WALRecord;
import org.apache.ignite.internal.processors.cache.binary.CacheObjectBinaryProcessorImpl;
import org.apache.ignite.internal.processors.cache.distributed.dht.topology.GridDhtLocalPartition;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxFinishRequest;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareResponse;
import org.apache.ignite.internal.processors.cache.persistence.db.wal.crc.WalTestUtils;
import org.apache.ignite.internal.processors.cache.persistence.filename.SnapshotFileTree;
import org.apache.ignite.internal.processors.cache.persistence.snapshot.IgniteSnapshotManager;
import org.apache.ignite.internal.processors.cache.persistence.snapshot.IgniteSnapshotVerifyException;
import org.apache.ignite.internal.processors.cache.persistence.snapshot.IncrementalSnapshotMetadata;
import org.apache.ignite.internal.processors.cache.persistence.wal.FileDescriptor;
import org.apache.ignite.internal.processors.cache.persistence.wal.WALPointer;
import org.apache.ignite.internal.processors.cache.persistence.wal.reader.IgniteWalIteratorFactory;
import org.apache.ignite.internal.processors.cacheobject.IgniteCacheObjectProcessor;
import org.apache.ignite.internal.processors.marshaller.MappedName;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.G;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.internal.visor.VisorTaskArgument;
import org.apache.ignite.lang.IgniteFuture;
import org.apache.ignite.lang.IgnitePredicate;
import org.apache.ignite.lang.IgniteUuid;
import org.apache.ignite.plugin.AbstractTestPluginProvider;
import org.apache.ignite.plugin.PluginContext;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.transactions.Transaction;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;
import static org.apache.ignite.events.EventType.EVT_CONSISTENCY_VIOLATION;
import static org.apache.ignite.internal.processors.cache.persistence.snapshot.AbstractSnapshotSelfTest.snp;
/**
 * Tests restoring cluster data from full and incremental snapshots: recovery with and without data,
 * multiple WAL segments, corrupted or missing snapshot WAL segments, transactions that span the
 * incremental snapshot boundary (included/excluded), binary metadata restore, and update-counter
 * consistency after restore.
 */
public class IncrementalSnapshotRestoreTest extends AbstractIncrementalSnapshotTest {
    /** Source of randomness for keys, values and node selection. */
    private static final Random RND = new Random();

    /** Bound max key value put in cache (to make intersections of data between snapshots). */
    private static final int BOUND = 1_000;

    /** Number of cache partitions. */
    private static final int PARTS = 10;

    /** One-shot failure hook fired from {@link FailedIgniteSnapshotManager} during metadata read. */
    private static volatile Runnable fail;

    /** Name of the second test cache. */
    private static final String CACHE2 = CACHE + "2";

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String instanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(instanceName);

        cfg.setIncludeEventTypes(EVT_CONSISTENCY_VIOLATION);

        cfg.setCacheConfiguration(cacheConfiguration(CACHE), cacheConfiguration(CACHE2));

        cfg.setFailureHandler(new StopNodeFailureHandler());

        cfg.setCommunicationSpi(new TestRecordingCommunicationSpi());

        // Only node with index 1 gets the failing snapshot manager plugin (used by testRestoreFromSecondAttempt).
        if (getTestIgniteInstanceIndex(instanceName) == 1)
            cfg.setPluginProviders(new FailedIgniteSnapshotManagerProvider());

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        cleanPersistenceDir();

        startGrids(nodes());

        grid(0).cluster().state(ClusterState.ACTIVE);
    }

    /** Configures caches with a fixed partition count. */
    @Override protected CacheConfiguration<Integer, Integer> cacheConfiguration(String name) {
        return super.cacheConfiguration(name)
            .setAffinity(new RendezvousAffinityFunction().setPartitions(PARTS));
    }

    /** Restoring from empty full and incremental snapshots must produce an empty cache. */
    @Test
    public void testRecoverySnapshotNoData() throws Exception {
        grid(0).snapshot().createSnapshot(SNP).get();

        for (int i = 0; i < 2; i++)
            grid(0).snapshot().createIncrementalSnapshot(SNP).get();

        // Restore each increment in turn after a clean restart.
        for (int i = 1; i <= 2; i++) {
            restartWithCleanPersistence();

            grid(0).snapshot().restoreSnapshot(SNP, null, i).get(getTestTimeout());

            checkData(Collections.emptyMap(), CACHE);
        }
    }

    /** Restore must succeed while a non-baseline node is present in the topology. */
    @Test
    public void testRecoveryWithNotBaselineNode() throws Exception {
        grid(0).cluster().baselineAutoAdjustEnabled(false);

        // Extra node stays outside the baseline because auto-adjust is disabled.
        startGrid(nodes() + 1);

        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        grid(0).destroyCache(CACHE);

        grid(0).snapshot().restoreSnapshot(SNP, Collections.singleton(CACHE), 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Restoring the base snapshot only (no increment index) must skip incremental data. */
    @Test
    public void testRecoveryOnClusterSnapshotOnly() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> {
            // Data loaded during the incremental stage must NOT be part of the expected set.
            Map<Integer, Integer> data = incSnp ? new HashMap<>() : expSnpData;

            loadData(CACHE, data, 1_000);
        });

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Non-positive incremental snapshot indexes must be rejected. */
    @Test
    public void testIllegalIncrementalSnapshotIndex() throws Exception {
        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, new HashMap<>(), 1));

        restartWithCleanPersistence();

        GridTestUtils.assertThrowsAnyCause(
            log,
            () -> grid(0).snapshot().restoreSnapshot(SNP, null, -1),
            IllegalArgumentException.class,
            "Incremental snapshot index must be greater than 0.");

        GridTestUtils.assertThrowsAnyCause(
            log,
            () -> grid(0).snapshot().restoreSnapshot(SNP, null, 0),
            IllegalArgumentException.class,
            "Incremental snapshot index must be greater than 0.");
    }

    /** Basic restore of the first incremental snapshot. */
    @Test
    public void testRecoveryOnIncrementalSnapshot() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Restore of an incremental snapshot that spans multiple WAL segments. */
    @Test
    public void testRecoveryOnIncrementalSnapshotWithMultipleSegments() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> {
            loadData(CACHE, expSnpData, 1_000);

            if (incSnp) {
                // Force several WAL segment rollovers on random nodes so the increment covers multiple segments.
                for (int i = 0; i < 3; i++) {
                    loadData(CACHE, expSnpData, 1_000);

                    rollWalSegment(grid(RND.nextInt(nodes())));
                }
            }
        });

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Restore must work on a topology where some nodes own no partitions of the cache. */
    @Test
    public void testRecoveryWithNoLocalPartitions() throws Exception {
        stopAllGrids();

        cleanPersistenceDir();

        // backups() + 2 nodes: with backups() copies per partition at least one node holds none locally.
        Ignite ign = startGrids(backups() + 2);

        ign.cluster().state(ClusterState.ACTIVE);

        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        restartWithCleanPersistence(backups() + 2, F.asList(CACHE, CACHE2));

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Restoring the latest (second) increment must include data from all preceding increments. */
    @Test
    public void testRecoveryOnLastIncrementalSnapshot() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        loadData(CACHE, expSnpData, 1_000);

        grid(0).snapshot().createIncrementalSnapshot(SNP).get(getTestTimeout());

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 2).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Restoring a single cache group must leave the other group absent. */
    @Test
    public void testRecoverySingleCacheGroup() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> {
            for (int i = 0; i < 1_000; i++) {
                try (Transaction tx = grid(0).transactions().txStart()) {
                    // Shift keys during the incremental stage so both stages write distinct key ranges.
                    int key = (incSnp ? 1_000 : 0) + i;

                    grid(0).cache(CACHE).put(key, i);
                    grid(0).cache(CACHE2).put(key, i);

                    expSnpData.put(key, i);

                    tx.commit();
                }
            }
        });

        grid(0).snapshot().createIncrementalSnapshot(SNP).get();

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, Collections.singleton(CACHE), 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
        assertNoCaches(Collections.singleton(CACHE2));

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, Collections.singleton(CACHE2), 1).get(getTestTimeout());

        checkData(expSnpData, CACHE2);
        assertNoCaches(Collections.singleton(CACHE));
    }

    /** Edge case: incremental snapshot that contains a single key operation. */
    @Test
    public void testRecoverySingleKey() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, expSnpData, 1));

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Restoring a non-existent increment index must fail with a clear error. */
    @Test
    public void testNonExistentSnapshotFailed() throws Exception {
        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, new HashMap<>(), 1));

        restartWithCleanPersistence();

        GridTestUtils.assertThrowsAnyCause(
            log,
            () -> grid(0).snapshot().restoreSnapshot(SNP, null, 2).get(getTestTimeout()),
            IllegalArgumentException.class,
            "No incremental snapshot found"
        );
    }

    /** Missing WAL segment on a single node must fail verification and roll restore back (no caches created). */
    @Test
    public void testRecoveryOnClusterSnapshotIfNoWalsOnSingleNode() throws Exception {
        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, new HashMap<>(), 1_000));

        restartWithCleanPersistence();

        SnapshotFileTree sft = snapshotFileTree(grid(1), SNP);

        File rm = sft.incrementalSnapshotFileTree(1).walSegment(0);

        assertTrue(rm.exists());
        assertTrue(U.delete(rm));

        GridTestUtils.assertThrowsAnyCause(log,
            () -> grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(),
            IgniteSnapshotVerifyException.class,
            "No WAL segments found for incremental snapshot");

        awaitPartitionMapExchange();

        assertNoCaches(F.asList(CACHE, CACHE2));
    }

    /** Corruption of the only increment WAL segment must fail restore and leave no caches behind. */
    @Test
    public void testFailedOnCorruptedWalSegment() throws Exception {
        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, new HashMap<>(), 1_000));

        restartWithCleanPersistence();

        corruptIncrementalSnapshot(1, 1, 0);

        GridTestUtils.assertThrowsAnyCause(log,
            () -> grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(),
            IgniteException.class, "System WAL record for incremental snapshot wasn't found");

        awaitPartitionMapExchange();

        assertNoCaches(F.asList(CACHE, CACHE2));
    }

    /** Corruption of an intermediate (non-first, non-last) WAL segment must fail restore. */
    @Test
    public void testFailedOnCorruptedIntermediateWalSegment() throws Exception {
        int crptNodeIdx = 1;

        loadAndCreateSnapshot(true, (incSnp) -> {
            loadData(CACHE, new HashMap<>(), 1_000);

            if (incSnp) {
                // Prepare incremental snapshot of 3 segments.
                for (int i = 0; i < 3; i++) {
                    // Load data after ClusterSnapshotRecord.
                    loadData(CACHE, new HashMap<>(), 1_000);

                    rollWalSegment(grid(crptNodeIdx));
                }

                loadData(CACHE, new HashMap<>(), 1_000);
            }
        });

        restartWithCleanPersistence();

        corruptIncrementalSnapshot(crptNodeIdx, 1, 1);

        Throwable ex = GridTestUtils.assertThrows(log,
            () -> grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(),
            Throwable.class, null);

        boolean expExc = false;

        // Corrupted WAL segment leads to different errors.
        if (ex instanceof IgniteException) {
            if (ex.getMessage().contains("Failed to read WAL record at position")
                || ex.getMessage().contains("WAL tail reached not in the last available segment"))
                expExc = true;
        }
        else if (ex instanceof AssertionError)
            expExc = true;

        assertTrue(ex.getMessage(), expExc);

        awaitPartitionMapExchange();

        assertNoCaches(F.asList(CACHE, CACHE2));
    }

    /**
     * An incremental snapshot invalidated by a client leaving mid-transaction must fail,
     * and a subsequent increment must still be restorable.
     */
    @Test
    public void testIgnoresInconsistentSnapshot() throws Exception {
        Ignite cln = startClientGrid(nodes());

        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(false, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        loadData(CACHE, expSnpData, 1_000);

        TestRecordingCommunicationSpi.spi(cln).blockMessages((n, msg) -> msg instanceof GridNearTxFinishRequest);

        // Transaction data will be part of next incremental snapshot.
        runTxAsync(cln, expSnpData);

        TestRecordingCommunicationSpi.spi(cln).waitForBlocked();

        IgniteFuture<Void> incSnpFut = grid(0).snapshot().createIncrementalSnapshot(SNP);

        // Wait for incremental snapshot started.
        assertTrue(GridTestUtils
            .waitForCondition(() -> snp(grid(0)).incrementalSnapshotId() != null, getTestTimeout(), 10));

        // Stopping the transaction coordinator (client) makes the in-flight snapshot inconsistent.
        stopGrid(nodes());

        GridTestUtils.assertThrowsAnyCause(log, incSnpFut::get, IgniteException.class, "Incremental snapshot is inconsistent");

        loadData(CACHE, expSnpData, 1);

        grid(0).snapshot().createIncrementalSnapshot(SNP).get();

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** A transaction blocked on finish during snapshot creation must be recorded as included. */
    @Test
    public void testTransactionInclude() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(false, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        loadData(CACHE, expSnpData, 1_000);

        TestRecordingCommunicationSpi.spi(grid(0)).blockMessages((n, msg) -> msg instanceof GridNearTxFinishRequest);

        // Transaction will be included into incremental snapshot.
        runTxAsync(grid(0), expSnpData);

        TestRecordingCommunicationSpi.spi(grid(0)).waitForBlocked();

        IgniteFuture<Void> incSnpFut = grid(0).snapshot().createIncrementalSnapshot(SNP);

        // Wait for incremental snapshot started.
        assertTrue(GridTestUtils
            .waitForCondition(() -> snp(grid(0)).incrementalSnapshotId() != null, getTestTimeout(), 10));

        TestRecordingCommunicationSpi.spi(grid(0)).stopBlock();

        incSnpFut.get(getTestTimeout());

        // Verify the finish record on every node lists the transaction as included.
        for (int i = 0; i < nodes(); i++) {
            try (WALIterator it = walIter(i)) {
                while (it.hasNext()) {
                    WALRecord rec = it.next().getValue();

                    if (rec.type() == WALRecord.RecordType.INCREMENTAL_SNAPSHOT_FINISH_RECORD)
                        assertFalse(((IncrementalSnapshotFinishRecord)rec).included().isEmpty());
                }
            }
        }

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** A transaction blocked on prepare during snapshot creation must be recorded as excluded. */
    @Test
    public void testTransactionExclude() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(false, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        loadData(CACHE, expSnpData, 1_000);

        for (int n = 1; n < nodes(); n++) {
            TestRecordingCommunicationSpi.spi(grid(n))
                .blockMessages((node, msg) -> msg instanceof GridNearTxPrepareResponse);
        }

        // Transaction will be excluded from incremental snapshot.
        runTxAsync(grid(0), null);

        for (int n = 1; n < nodes(); n++)
            TestRecordingCommunicationSpi.spi(grid(n)).waitForBlocked();

        IgniteFuture<Void> incSnpFut = grid(0).snapshot().createIncrementalSnapshot(SNP);

        // Wait for incremental snapshot started.
        assertTrue(GridTestUtils
            .waitForCondition(() -> snp(grid(0)).incrementalSnapshotId() != null, getTestTimeout(), 10));

        for (int n = 1; n < nodes(); n++)
            TestRecordingCommunicationSpi.spi(grid(n)).stopBlock();

        incSnpFut.get(getTestTimeout());

        // Verify the finish record on every node lists the transaction as excluded.
        for (int i = 0; i < nodes(); i++) {
            try (WALIterator it = walIter(i)) {
                while (it.hasNext()) {
                    WALRecord rec = it.next().getValue();

                    if (rec.type() == WALRecord.RecordType.INCREMENTAL_SNAPSHOT_FINISH_RECORD)
                        assertFalse(((IncrementalSnapshotFinishRecord)rec).excluded().isEmpty());
                }
            }
        }

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** Binary (schema-less) keys and values must survive incremental snapshot restore. */
    @Test
    public void testRestoreBinaryObjects() throws Exception {
        Map<BinaryObject, BinaryObject> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> {
            try (Transaction tx = grid(0).transactions().txStart()) {
                BinaryObject key = grid(0).binary().builder("TestKey")
                    .setField("key", incSnp ? 123 : 122)
                    .build();

                BinaryObject val = grid(0).binary().builder("TestVal")
                    .setField("val", 0)
                    .build();

                grid(0).cache(CACHE).put(key, val);

                expSnpData.put(key, val);

                tx.commit();
            }
        });

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);
    }

    /** After a forced first-attempt failure a second restore attempt must succeed and survive restart. */
    @Test
    public void testRestoreFromSecondAttempt() throws Exception {
        // One-shot hook: FailedIgniteSnapshotManager on node 1 throws on first metadata read only.
        fail = () -> {
            throw new RuntimeException("Force to fail snapshot restore.");
        };

        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(true, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        restartWithCleanPersistence();

        GridTestUtils.assertThrowsAnyCause(log,
            () -> grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(),
            RuntimeException.class, "Force to fail snapshot restore.");

        awaitPartitionMapExchange();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get();

        checkData(expSnpData, CACHE);

        // Restored data must also survive a full cluster restart.
        stopAllGrids();

        startGrids(3).cluster().state(ClusterState.ACTIVE);

        checkData(expSnpData, CACHE);
    }

    /** Partition update counters must have no gaps after restore despite reordered commits. */
    @Test
    public void testNoGapsInCountersAfterRestore() throws Exception {
        Map<Integer, Integer> expSnpData = new HashMap<>();

        loadAndCreateSnapshot(false, (incSnp) -> loadData(CACHE, expSnpData, 1_000));

        loadData(CACHE, expSnpData, 1_000);

        final CountDownLatch txIdSetLatch = new CountDownLatch(1);
        final CountDownLatch msgBlkSet = new CountDownLatch(1);

        final AtomicReference<IgniteUuid> exclTxId = new AtomicReference<>();

        multithreadedAsync(() -> {
            try (Transaction tx = grid(0).transactions().txStart()) {
                // Use keys out of bound to avoid dead blocking transactions and snapshot while keys are being locked.
                for (int i = 0; i < 10; i++)
                    grid(0).cache(CACHE).put(BOUND + i, 0);

                exclTxId.set(tx.xid());

                txIdSetLatch.countDown();

                U.awaitQuiet(msgBlkSet);

                tx.commit();
            }
        }, 1);

        U.awaitQuiet(txIdSetLatch);

        // Block only the prepare response of the tracked transaction, letting other traffic through.
        for (int n = 1; n < nodes(); n++) {
            TestRecordingCommunicationSpi.spi(grid(n)).blockMessages((node, msg) ->
                msg instanceof GridNearTxPrepareResponse
                    && ((GridNearTxPrepareResponse)msg).version().asIgniteUuid().equals(exclTxId.get()));
        }

        msgBlkSet.countDown();

        for (int n = 1; n < nodes(); n++)
            TestRecordingCommunicationSpi.spi(grid(n)).waitForBlocked();

        loadData(CACHE, expSnpData, 100);

        IgniteFuture<Void> incSnpFut = grid(0).snapshot().createIncrementalSnapshot(SNP);

        // Wait for incremental snapshot started.
        assertTrue(GridTestUtils
            .waitForCondition(() -> snp(grid(0)).incrementalSnapshotId() != null, getTestTimeout(), 10));

        for (int n = 1; n < nodes(); n++)
            TestRecordingCommunicationSpi.spi(grid(n)).stopBlock();

        incSnpFut.get(getTestTimeout());

        restartWithCleanPersistence();

        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        checkData(expSnpData, CACHE);

        // finalizeUpdateCounters() returns null when there are no counter gaps to close.
        for (int i = 0; i < nodes(); i++) {
            for (GridDhtLocalPartition locPart: grid(i).cachex(CACHE).context().topology().localPartitions())
                assertNull(locPart.finalizeUpdateCounters());
        }
    }

    /** Binary metadata and marshaller mappings of a new class must be restored with the increment. */
    @Test
    public void testBinaryMetaDataRestored() throws Exception {
        loadAndCreateSnapshot(false, (incSnp) -> loadData(CACHE, new HashMap<>(), 1_000));

        // Person type is registered only AFTER the base snapshot, so its metadata lives in the increment.
        grid(0).cache(CACHE).put(10_000, new Person("name"));

        checkBinaryMetaRestored((shouldExists) -> {
            BinaryContext binCtx = ((CacheObjectBinaryProcessorImpl)grid(0).context().cacheObjects()).binaryContext();

            int persTypeId = binCtx.typeId(Person.class.getName());

            for (int n = 0; n < nodes(); n++) {
                List<Map<Integer, MappedName>> mappings = grid(n).context().marshallerContext().getCachedMappings();

                IgniteCacheObjectProcessor objPrc = grid(n).context().cacheObjects();

                if (shouldExists) {
                    assertTrue(mappings.stream().anyMatch(m -> m.containsKey(persTypeId)));

                    assertNotNull(objPrc.metadata(objPrc.typeId(Person.class.getName())));
                }
                else {
                    assertFalse(mappings.stream().anyMatch(m -> m.containsKey(persTypeId)));

                    assertNull(objPrc.metadata(objPrc.typeId(Person.class.getName())));
                }
            }

            // Check accessing class only after explicit validate meta on every node.
            if (shouldExists) {
                Person p = (Person)grid(0).cache(CACHE).get(10_000);

                assertEquals("name", p.name);
            }
        });
    }

    /** Binary type schema changes (a field added after the base snapshot) must be restored with the increment. */
    @Test
    public void testChangedBinaryMetaDataRestored() throws Exception {
        loadAndCreateSnapshot(false, (incSnp) -> {
            Random rnd = new Random();

            for (int i = 0; i < 1_000; i++) {
                BinaryObject obj = grid(0).binary().builder("TestType")
                    .setField("age", rnd.nextInt(100))
                    .build();

                grid(0).cache(CACHE).withKeepBinary().put(i, obj);
            }
        });

        // Second field "balance" extends the TestType schema AFTER the base snapshot.
        grid(0).cache(CACHE).withKeepBinary().put(
            10_000,
            grid(0).binary().builder("TestType")
                .setField("age", 10)
                .setField("balance", 100)
                .build());

        checkBinaryMetaRestored((shouldChange) -> {
            for (int n = 0; n < nodes(); n++) {
                IgniteCacheObjectProcessor objPrc = grid(n).context().cacheObjects();

                BinaryType binType = objPrc.metadata(objPrc.typeId("TestType"));

                assertNotNull(binType);

                assertEquals(shouldChange ? 2 : 1, binType.fieldNames().size());
            }

            // Check accessing class after explicit validate meta.
            BinaryObject obj = (BinaryObject)grid(0).cache(CACHE).withKeepBinary().get(10_000);

            if (shouldChange) {
                assertEquals(10, (int)obj.field("age"));
                assertEquals(100, (int)obj.field("balance"));
            }
            else
                assertNull(obj);
        });
    }

    /** Checks that binary metadata and marshaller restored from an incremental snapshot, and still exists after restart. */
    private void checkBinaryMetaRestored(Consumer<Boolean> binaryMetaCheck) throws Exception {
        grid(0).snapshot().createIncrementalSnapshot(SNP).get();

        restartWithCleanPersistence();

        // Base snapshot only: the metadata written after it must be absent.
        grid(0).snapshot().restoreSnapshot(SNP, null).get(getTestTimeout());

        binaryMetaCheck.accept(false);

        restartWithCleanPersistence();

        // With the increment the metadata must be present.
        grid(0).snapshot().restoreSnapshot(SNP, null, 1).get(getTestTimeout());

        binaryMetaCheck.accept(true);

        // And it must survive a plain restart.
        stopAllGrids();

        startGrids(nodes());

        binaryMetaCheck.accept(true);
    }

    /**
     * Load and create full and incremental snapshots.
     *
     * @param createIncSnp Whether to create incremental snapshot.
     * @param loadData Loads data, consumes stage (for base snapshot {@code false}, for incremental snapshot {@code true}).
     */
    private void loadAndCreateSnapshot(boolean createIncSnp, Consumer<Boolean> loadData) {
        loadData.accept(false);

        grid(0).snapshot().createSnapshot(SNP).get();

        if (createIncSnp) {
            loadData.accept(true);

            grid(0).snapshot().createIncrementalSnapshot(SNP).get();
        }
    }

    /**
     * Verifies cache content matches the expected map exactly, then checks partition consistency
     * via idle-verify and a CHECK_ONLY read-repair pass over all partitions.
     *
     * @param expData Expected cache content.
     * @param cacheName Cache to verify.
     */
    private void checkData(Map<?, ?> expData, String cacheName) throws Exception {
        List<Cache.Entry<Object, Object>> actData = grid(0).cache(cacheName).withKeepBinary().query(new ScanQuery<>()).getAll();

        assertEquals(actData.size(), expData.size());

        for (Cache.Entry<Object, Object> e: actData) {
            assertTrue("Missed: " + e, expData.containsKey(e.getKey()));

            assertEquals(e.getValue(), expData.get(e.getKey()));
        }

        assertPartitionsSame(idleVerify(grid(0)));

        // Read repair check - OK.
        AtomicBoolean readRepairCheckFailed = new AtomicBoolean(false);

        // Any consistency-violation event raised during the repair task marks the check as failed.
        grid(0).events().remoteListen(null, (IgnitePredicate<Event>)e -> {
            assert e instanceof CacheConsistencyViolationEvent;

            readRepairCheckFailed.set(true);

            return true;
        }, EVT_CONSISTENCY_VIOLATION);

        ConsistencyRepairCommandArg arg = new ConsistencyRepairCommandArg();

        arg.cache(cacheName);
        arg.partitions(IntStream.range(0, PARTS).toArray());
        arg.strategy(ReadRepairStrategy.CHECK_ONLY);

        ConsistencyTaskResult res = grid(0).compute().execute(
            ConsistencyRepairTask.class,
            new VisorTaskArgument<>(
                G.allGrids().stream().map(ign -> ign.cluster().localNode().id()).collect(Collectors.toList()),
                arg,
                false
            )
        ).result();

        assertFalse(res.message(), res.cancelled());
        assertFalse(res.message(), res.failed());

        assertFalse(readRepairCheckFailed.get());
    }

    /**
     * Starts a single background transaction of 50 puts coordinated by the given node.
     *
     * @param txCrdNode Transaction coordinator node.
     * @param data If not {@code null}, only pre-existing keys are updated and the map is kept in sync;
     *      if {@code null}, arbitrary keys within {@link #BOUND} are written without tracking.
     */
    private void runTxAsync(Ignite txCrdNode, @Nullable Map<Integer, Integer> data) throws Exception {
        multithreadedAsync(() -> {
            try (Transaction tx = txCrdNode.transactions().txStart()) {
                for (int i = 0; i < 50; i++) {
                    while (true) {
                        int key = RND.nextInt(BOUND);
                        int val = RND.nextInt();

                        if (data != null) {
                            // Touch only keys already present so the expected map stays exact.
                            if (!data.containsKey(key))
                                continue;

                            data.put(key, val);
                        }

                        txCrdNode.cache(CACHE).put(key, val);

                        break;
                    }
                }

                tx.commit();
            }
        }, 1);
    }

    /**
     * @param cacheName Cache name to load.
     * @param data Map of inserted entries.
     * @param opsCnt Count of operations to load.
     */
    private void loadData(String cacheName, Map<Integer, Integer> data, int opsCnt) {
        IgniteCache<Integer, Integer> cache = grid(0).cache(cacheName);

        int bound = 1000;

        // Each operation runs in its own transaction; the tracking map mirrors every cache change.
        for (int i = 0; i < opsCnt; i++) {
            try (Transaction tx = grid(0).transactions().txStart()) {
                Operation op = Operation.values()[RND.nextInt(Operation.values().length)];

                switch (op) {
                    case PUT:
                        int putKey = RND.nextInt(bound);
                        int putVal = RND.nextInt();

                        data.put(putKey, putVal);
                        cache.put(putKey, putVal);

                        break;

                    case PUT_ALL:
                        int putKey1 = RND.nextInt(bound);
                        int putVal1 = RND.nextInt();
                        int putKey2 = RND.nextInt(bound);
                        int putVal2 = RND.nextInt();

                        data.putAll(F.asMap(putKey1, putVal1, putKey2, putVal2));
                        cache.putAll(F.asMap(putKey1, putVal1, putKey2, putVal2));

                        break;

                    case REMOVE:
                        int rmKey = RND.nextInt(bound);

                        data.remove(rmKey);
                        cache.remove(rmKey);

                        break;

                    case REMOVE_ALL:
                        int rmKey1 = RND.nextInt(bound);
                        int rmKey2 = RND.nextInt(bound);

                        data.remove(rmKey1);
                        data.remove(rmKey2);

                        cache.removeAll(new HashSet<>(Arrays.asList(rmKey1, rmKey2)));

                        break;
                }

                tx.commit();
            }
        }
    }

    /** Corrupts WAL segment in incremental snapshot. */
    private void corruptIncrementalSnapshot(int nodeIdx, int incIdx, int segIdx) throws Exception {
        IgniteWalIteratorFactory factory = new IgniteWalIteratorFactory(log);

        SnapshotFileTree sft = snapshotFileTree(grid(nodeIdx), SNP);

        File[] incSegs = sft.incrementalSnapshotFileTree(incIdx).walCompactedOrRawSegments();

        Arrays.sort(incSegs);

        File crptSeg = incSegs[segIdx];

        IgniteWalIteratorFactory.IteratorParametersBuilder params = new IgniteWalIteratorFactory.IteratorParametersBuilder()
            .filesOrDirs(crptSeg);

        try (WALIterator it = factory.iterator(params)) {
            // Skip 400 records, then corrupt the segment at the position of the 401st record.
            for (int i = 0; i < 400; i++)
                it.next();

            WALPointer corruptPtr = it.next().getKey();

            WalTestUtils.corruptWalSegmentFile(new FileDescriptor(incSegs[segIdx]), corruptPtr);
        }
    }

    /** Restarts the default topology with empty persistence and both test caches cleaned. */
    private void restartWithCleanPersistence() throws Exception {
        restartWithCleanPersistence(nodes(), F.asList(CACHE, CACHE2));
    }

    /** Asserts that none of the given caches exists on any node. */
    private void assertNoCaches(Collection<String> caches) {
        for (int i = 0; i < nodes(); i++) {
            for (String cache: caches)
                assertNull("[node=" + i + ", cache=" + cache + ']', grid(i).cache(cache));
        }
    }

    /** {@inheritDoc} */
    @Override protected int nodes() {
        return 3;
    }

    /** {@inheritDoc} */
    @Override protected int backups() {
        return 2;
    }

    /** Plugin that substitutes {@link FailedIgniteSnapshotManager} for the regular snapshot manager. */
    private static class FailedIgniteSnapshotManagerProvider extends AbstractTestPluginProvider {
        /** {@inheritDoc} */
        @Override public String name() {
            return "FailedIgniteSnapshotManagerProvider";
        }

        /** {@inheritDoc} */
        @Override public <T> @Nullable T createComponent(PluginContext ctx, Class<T> cls) {
            if (IgniteSnapshotManager.class.equals(cls))
                return (T)new FailedIgniteSnapshotManager(((IgniteEx)ctx.grid()).context());

            return null;
        }
    }

    /** Snapshot manager that runs the one-shot {@link #fail} hook when reading increment metadata. */
    private static class FailedIgniteSnapshotManager extends IgniteSnapshotManager {
        /** */
        public FailedIgniteSnapshotManager(GridKernalContext ctx) {
            super(ctx);
        }

        /** {@inheritDoc} */
        @Override public IncrementalSnapshotMetadata readIncrementalSnapshotMetadata(File meta) throws IgniteCheckedException, IOException {
            // Fire the hook exactly once, then clear it so retries succeed.
            if (fail != null) {
                Runnable f = fail;

                fail = null;

                f.run();
            }

            return super.readIncrementalSnapshotMetadata(meta);
        }
    }

    /** Simple value class used to register new binary metadata after the base snapshot. */
    private static class Person {
        /** */
        private final String name;

        /** */
        Person(String name) {
            this.name = name;
        }
    }

    /** Cache operation kinds used by {@link #loadData}. */
    private enum Operation {
        /** Single put. */
        PUT,

        /** Batch put of two entries. */
        PUT_ALL,

        /** Single remove. */
        REMOVE,

        /** Batch remove of two keys. */
        REMOVE_ALL
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.maven.archetype.mojos;
import javax.inject.Inject;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.io.Writer;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.collections.CollectionUtils;
import org.apache.maven.archetype.ArchetypeGenerationRequest;
import org.apache.maven.archetype.ArchetypeGenerationResult;
import org.apache.maven.archetype.common.Constants;
import org.apache.maven.archetype.downloader.DownloadException;
import org.apache.maven.archetype.downloader.Downloader;
import org.apache.maven.archetype.exception.ArchetypeNotConfigured;
import org.apache.maven.archetype.generator.ArchetypeGenerator;
import org.apache.maven.archetype.ui.generation.ArchetypeGenerationConfigurator;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.apache.maven.settings.Settings;
import org.apache.maven.settings.io.xpp3.SettingsXpp3Writer;
import org.apache.maven.shared.invoker.DefaultInvocationRequest;
import org.apache.maven.shared.invoker.InvocationRequest;
import org.apache.maven.shared.invoker.InvocationResult;
import org.apache.maven.shared.invoker.Invoker;
import org.apache.maven.shared.invoker.MavenInvocationException;
import org.apache.maven.shared.scriptinterpreter.ScriptException;
import org.apache.maven.shared.scriptinterpreter.ScriptRunner;
import org.codehaus.plexus.util.FileUtils;
import org.codehaus.plexus.util.IOUtil;
import org.codehaus.plexus.util.InterpolationFilterReader;
import org.codehaus.plexus.util.StringUtils;
import org.codehaus.plexus.util.introspection.ReflectionValueExtractor;
import org.codehaus.plexus.util.xml.XmlStreamReader;
import org.codehaus.plexus.util.xml.XmlStreamWriter;
/**
* <p>Execute the archetype integration tests, consisting in generating projects from the current archetype and optionally
* comparing generated projects with reference copy.</p>
*
* <p>Each IT consists of a sub-directory in <code>src/test/resources/projects</code> containing:</p>
*
* <ul>
* <li>a <code>goal.txt</code> file, containing a list of goals to run against the generated project (can be empty,
* content ignored before maven-archetype-plugin 2.1),</li>
* <li>an <code>archetype.properties</code> file, containing properties for project generation,</li>
* <li>an optional <code>reference/</code> directory containing a reference copy of the expected project created from
* the IT.</li>
* </ul>
* <p>To let the IT create a Maven module below some other Maven project (being generated from another archetype)
* one can additionally specify an optional <code>archetype.pom.properties</code> file in the parent directory,
* specifying the archetype's <code>groupId</code>, <code>artifactId</code> and <code>version</code> along with its
* <code>archetype.properties</code> file, containing properties for project generation. Both files are leveraged
* to create the parent project for this IT. Parent projects can be nested.</p>
*
* <p>An example structure for such an integration test looks like this</p>
* <table>
* <caption>integration test folder structure</caption>
* <tr>
* <th>File/Directory</th>
* <th>Description</th>
* </tr>
* <tr>
* <td><code>src/test/resources/projects/it1</code></td>
* <td>Directory for integration test 1</td>
* </tr>
* <tr>
* <td><code>src/test/resources/projects/it1/archetype.pom.properties</code></td>
* <td>GAV for the archetype from which to generate the parent</td>
* </tr>
* <tr>
* <td><code>src/test/resources/projects/it1/archetype.properties</code></td>
* <td>All required properties for the archetype being specified by <code>archetype.pom.properties</code> on this level</td>
* </tr>
* <tr>
* <td><code>src/test/resources/projects/it1/child</code></td>
* <td>Directory for maven module within integration test 1 (this folder's name is not relevant)</td>
* </tr>
* <tr>
* <td><code>src/test/resources/projects/it1/child/goal.txt</code></td>
* <td>The file containing the list of goals to be executed against the generated project</td>
* </tr>
* <tr>
* <td><code>src/test/resources/projects/it1/child/archetype.properties</code></td>
* <td>All required properties for this project's archetype</td>
* </tr>
* </table>
*
* <p>Notice that it is expected to be run as part as of a build after the <code>package</code> phase and not directly as a
* goal from CLI.</p>
*
* @author rafale
*/
@Mojo(name = "integration-test", requiresProject = true)
public class IntegrationTestMojo extends AbstractMojo {
    // Collaborators injected through the constructor: generate projects from the
    // archetype under test, resolve parent archetypes from repositories, configure
    // generation requests, and invoke Maven builds on the generated projects.
    private ArchetypeGenerator archetypeGenerator;

    private Downloader downloader;

    private Invoker invoker;

    private ArchetypeGenerationConfigurator archetypeGenerationConfigurator;

    @Inject
    public IntegrationTestMojo(
            ArchetypeGenerator archetypeGenerator,
            Downloader downloader,
            Invoker invoker,
            ArchetypeGenerationConfigurator archetypeGenerationConfigurator) {
        this.archetypeGenerator = archetypeGenerator;
        this.downloader = downloader;
        this.invoker = invoker;
        this.archetypeGenerationConfigurator = archetypeGenerationConfigurator;
    }

    /**
     * The archetype project to execute the integration tests on.
     */
    @Parameter(defaultValue = "${project}", readonly = true, required = true)
    private MavenProject project;

    /**
     * The current Maven session; provides the repository session used when resolving
     * and generating archetypes.
     */
    @Parameter(defaultValue = "${session}", readonly = true, required = true)
    private MavenSession session;

    /**
     * Skip the integration test.
     */
    @Parameter(property = "archetype.test.skip")
    private boolean skip = false;

    /**
     * Directory of test projects
     *
     * @since 2.2
     */
    @Parameter(
            property = "archetype.test.projectsDirectory",
            defaultValue = "${project.build.testOutputDirectory}/projects",
            required = true)
    private File testProjectsDirectory;

    /**
     * Relative path of a cleanup/verification hook script to run after executing the build. This script may be written
     * with either BeanShell or Groovy. If the file extension is omitted (e.g. <code>verify</code>), the
     * plugin searches for the file by trying out the well-known extensions <code>.bsh</code> and <code>.groovy</code>.
     * If this script exists for a particular project but returns any non-null value different from <code>true</code> or
     * throws an exception, the corresponding build is flagged as a failure.
     *
     * @since 2.2
     */
    @Parameter(property = "archetype.test.verifyScript", defaultValue = "verify")
    private String postBuildHookScript;

    /**
     * Suppress logging to the <code>build.log</code> file.
     *
     * @since 2.2
     */
    @Parameter(property = "archetype.test.noLog", defaultValue = "false")
    private boolean noLog;

    /**
     * Flag used to determine whether the build logs should be output to the normal mojo log.
     *
     * @since 2.2
     */
    @Parameter(property = "archetype.test.streamLogs", defaultValue = "true")
    private boolean streamLogs;

    /**
     * The file encoding for the post-build script.
     *
     * @since 2.2
     */
    @Parameter(property = "encoding", defaultValue = "${project.build.sourceEncoding}")
    private String encoding;

    /**
     * The local repository to run maven instance.
     *
     * @since 2.2
     */
    @Parameter(
            property = "archetype.test.localRepositoryPath",
            defaultValue = "${settings.localRepository}",
            required = true)
    private File localRepositoryPath;

    /**
     * Flag to show the Maven version used for running the ITs (CLI option: <code>-V,--show-version</code>).
     *
     * @since 2.2
     */
    @Parameter(property = "archetype.test.showVersion", defaultValue = "false")
    private boolean showVersion;

    /**
     * Ignores the EOL encoding for comparing files (default and original behaviour is false).
     *
     * @since 2.3
     */
    @Parameter(property = "archetype.test.ignoreEOLStyle", defaultValue = "false")
    private boolean ignoreEOLStyle;

    /**
     * Whether to show debug statements in the build output.
     *
     * @since 2.2
     */
    @Parameter(property = "archetype.test.debug", defaultValue = "false")
    private boolean debug;

    /**
     * A list of additional properties which will be used to filter tokens in settings.xml
     *
     * @since 2.2
     */
    @Parameter
    private Map<String, String> filterProperties;

    /**
     * The current user system settings for use in Maven.
     *
     * @since 2.2
     */
    @Parameter(defaultValue = "${settings}", required = true, readonly = true)
    private Settings settings;

    /**
     * Path to an alternate <code>settings.xml</code> to use for Maven invocation with all ITs. Note that the
     * <code>&lt;localRepository&gt;</code> element of this settings file is always ignored, i.e. the path given by the
     * parameter {@link #localRepositoryPath} is dominant.
     *
     * @since 2.2
     */
    @Parameter(property = "archetype.test.settingsFile")
    private File settingsFile;

    /**
     * Common set of properties to pass in on each project's command line, via -D parameters.
     *
     * @since 3.0.2
     */
    @Parameter
    private Map<String, String> properties = new HashMap<>();
@Override
public void execute() throws MojoExecutionException {
if (skip) {
return;
}
if (!testProjectsDirectory.exists()) {
getLog().warn("No Archetype IT projects: root 'projects' directory not found.");
return;
}
File archetypeFile = project.getArtifact().getFile();
if (archetypeFile == null) {
throw new MojoExecutionException("Unable to get the archetypes' artifact which should have just been built:"
+ " you probably launched 'mvn archetype:integration-test' instead of"
+ " 'mvn integration-test'.");
}
try {
List<File> projectsGoalFiles = FileUtils.getFiles(testProjectsDirectory, "**/goal.txt", "");
if (projectsGoalFiles.isEmpty()) {
getLog().warn("No Archetype IT projects: no directory with goal.txt found.");
return;
}
StringWriter errorWriter = new StringWriter();
for (File goalFile : projectsGoalFiles) {
try {
processIntegrationTest(goalFile, archetypeFile);
} catch (IntegrationTestFailure ex) {
errorWriter.write(
"\nArchetype IT '" + goalFile.getParentFile().getName() + "' failed: ");
errorWriter.write(ex.getMessage());
}
}
String errors = errorWriter.toString();
if (!(errors == null || errors.isEmpty())) {
throw new MojoExecutionException(errors);
}
} catch (IOException ex) {
throw new MojoExecutionException(ex.getMessage(), ex);
}
}
/**
* Checks that actual directory content is the same as reference.
*
* @param reference the reference directory
* @param actual the actual directory to compare with the reference
* @throws IntegrationTestFailure if content differs
*/
private void assertDirectoryEquals(File reference, File actual) throws IntegrationTestFailure, IOException {
List<String> referenceFiles =
FileUtils.getFileAndDirectoryNames(reference, "**", null, false, true, true, true);
getLog().debug("reference content: " + referenceFiles);
List<String> actualFiles = FileUtils.getFileAndDirectoryNames(actual, "**", null, false, true, true, true);
getLog().debug("actual content: " + actualFiles);
boolean fileNamesEquals = CollectionUtils.isEqualCollection(referenceFiles, actualFiles);
if (!fileNamesEquals) {
getLog().debug("Actual list of files is not the same as reference:");
int missing = 0;
for (String ref : referenceFiles) {
if (actualFiles.contains(ref)) {
actualFiles.remove(ref);
getLog().debug("Contained " + ref);
} else {
missing++;
getLog().error("Not contained " + ref);
}
}
getLog().error("Remains " + actualFiles);
throw new IntegrationTestFailure("Reference and generated project differs (missing: " + missing
+ ", unexpected: " + actualFiles.size() + ")");
}
if (!ignoreEOLStyle) {
getLog().warn("Property ignoreEOLStyle was not set - files will be compared considering their EOL style!");
}
boolean contentEquals = true;
for (String file : referenceFiles) {
File referenceFile = new File(reference, file);
File actualFile = new File(actual, file);
if (referenceFile.isDirectory()) {
if (actualFile.isFile()) {
getLog().error("File " + file + " is a directory in the reference but a file in actual");
contentEquals = false;
}
} else if (actualFile.isDirectory()) {
if (referenceFile.isFile()) {
getLog().error("File " + file + " is a file in the reference but a directory in actual");
contentEquals = false;
}
} else if (!contentEquals(referenceFile, actualFile)) {
getLog().error("Contents of file " + file + " are not equal");
contentEquals = false;
}
}
if (!contentEquals) {
throw new IntegrationTestFailure("Some content are not equals");
}
}
/**
* Uses the {@link #ignoreEOLStyle} attribute to compare the two files. If {@link #ignoreEOLStyle} is true,
* then the comparison does not take care about the EOL (aka newline) character.
*/
private boolean contentEquals(File referenceFile, File actualFile) throws IOException {
// Original behaviour
if (!ignoreEOLStyle) {
return FileUtils.contentEquals(referenceFile, actualFile);
}
getLog().debug("Comparing files with EOL style ignored.");
try (BufferedReader referenceFileReader = new BufferedReader(new FileReader(referenceFile));
BufferedReader actualFileReader = new BufferedReader(new FileReader(actualFile))) {
String refLine = null;
String actualLine = null;
do {
refLine = referenceFileReader.readLine();
actualLine = actualFileReader.readLine();
if (!Objects.equals(refLine, actualLine)) {
getLog().error("Conflict found. Reference line :");
getLog().error(refLine);
getLog().error("Actual line :");
getLog().error(actualLine);
return false;
}
} while (refLine != null || actualLine != null);
return true;
}
}
private Properties loadProperties(final File propertiesFile) throws IOException {
Properties properties = new Properties();
try (InputStream in = Files.newInputStream(propertiesFile.toPath())) {
properties.load(in);
}
return properties;
}
private void processIntegrationTest(File goalFile, File archetypeFile)
throws IntegrationTestFailure, MojoExecutionException {
getLog().info("Processing Archetype IT project: "
+ goalFile.getParentFile().getName());
try {
Properties properties = getProperties(goalFile);
File basedir = new File(goalFile.getParentFile(), "project");
FileUtils.deleteDirectory(basedir);
FileUtils.mkdir(basedir.toString());
basedir = setupParentProjects(goalFile.getParentFile().getParentFile(), basedir);
ArchetypeGenerationRequest request = generate(
project.getGroupId(),
project.getArtifactId(),
project.getVersion(),
archetypeFile,
properties,
basedir.toString());
File reference = new File(goalFile.getParentFile(), "reference");
if (reference.exists()) {
// compare generated project with reference
getLog().info("Comparing generated project with reference content: " + reference);
assertDirectoryEquals(reference, new File(basedir, request.getArtifactId()));
}
String goals = FileUtils.fileRead(goalFile);
if (goals != null && !goals.isEmpty()) {
invokePostArchetypeGenerationGoals(goals.trim(), new File(basedir, request.getArtifactId()), goalFile);
}
} catch (IOException ioe) {
throw new IntegrationTestFailure(ioe);
}
}
private ArchetypeGenerationRequest generate(
String archetypeGroupId,
String archetypeArtifactId,
String archetypeVersion,
File archetypeFile,
Properties properties,
String basedir)
throws IntegrationTestFailure, MojoExecutionException {
// @formatter:off
ArchetypeGenerationRequest request = new ArchetypeGenerationRequest()
.setArchetypeGroupId(archetypeGroupId)
.setArchetypeArtifactId(archetypeArtifactId)
.setArchetypeVersion(archetypeVersion)
.setGroupId(properties.getProperty(Constants.GROUP_ID))
.setArtifactId(properties.getProperty(Constants.ARTIFACT_ID))
.setVersion(properties.getProperty(Constants.VERSION))
.setPackage(properties.getProperty(Constants.PACKAGE))
.setRepositorySession(session.getRepositorySession())
.setOutputDirectory(basedir)
.setProperties(properties);
// @formatter:on
ArchetypeGenerationResult result = new ArchetypeGenerationResult();
try {
archetypeGenerationConfigurator.configureArchetype(request, false, properties);
} catch (Exception e) {
throw new MojoExecutionException("Cannot configure archetype", e);
}
archetypeGenerator.generateArchetype(request, archetypeFile, result);
if (result.getCause() != null) {
if (result.getCause() instanceof ArchetypeNotConfigured) {
ArchetypeNotConfigured anc = (ArchetypeNotConfigured) result.getCause();
throw new IntegrationTestFailure(
"Missing required properties in archetype.properties: "
+ StringUtils.join(anc.getMissingProperties().iterator(), ", "),
anc);
}
throw new IntegrationTestFailure(result.getCause().getMessage(), result.getCause());
}
return request;
}
private File setupParentProjects(File configFolder, File buildFolder)
throws IOException, MojoExecutionException, IntegrationTestFailure {
// look for 'archetype.pom.properties'
File archetypePomPropertiesFile = new File(configFolder, "archetype.pom.properties");
if (!archetypePomPropertiesFile.exists()) {
getLog().debug("No 'archetype.pom.properties' file found in " + configFolder);
return buildFolder;
}
// go up to the parent configuration folder
buildFolder = setupParentProjects(configFolder.getParentFile(), buildFolder);
Properties archetypePomProperties = loadProperties(archetypePomPropertiesFile);
String groupId = archetypePomProperties.getProperty(Constants.GROUP_ID);
if (groupId == null || groupId.isEmpty()) {
throw new MojoExecutionException(
"Property " + Constants.GROUP_ID + " not set in " + archetypePomPropertiesFile);
}
String artifactId = archetypePomProperties.getProperty(Constants.ARTIFACT_ID);
if (artifactId == null || artifactId.isEmpty()) {
throw new MojoExecutionException(
"Property " + Constants.ARTIFACT_ID + " not set in " + archetypePomPropertiesFile);
}
String version = archetypePomProperties.getProperty(Constants.VERSION);
if (version == null || version.isEmpty()) {
throw new MojoExecutionException(
"Property " + Constants.VERSION + " not set in " + archetypePomPropertiesFile);
}
File archetypeFile;
try {
archetypeFile = getArchetypeFile(groupId, artifactId, version);
} catch (DownloadException e) {
throw new MojoExecutionException("Could not resolve archetype artifact ", e);
}
Properties archetypeProperties = getProperties(archetypePomPropertiesFile);
getLog().info("Setting up parent project in " + buildFolder);
ArchetypeGenerationRequest request =
generate(groupId, artifactId, version, archetypeFile, archetypeProperties, buildFolder.toString());
return new File(buildFolder, request.getArtifactId());
}
    /**
     * Resolves the archetype artifact with the given coordinates via the configured
     * {@code downloader} (using the project's remote repositories and the current
     * repository session) and returns the resolved file.
     *
     * @throws DownloadException if the artifact cannot be resolved
     */
    private File getArchetypeFile(String groupId, String artifactId, String version) throws DownloadException {
        return downloader.download(
                groupId, artifactId, version, project.getRemoteProjectRepositories(), session.getRepositorySession());
    }
private Properties getProperties(File goalFile) throws IOException {
File propertiesFile = new File(goalFile.getParentFile(), "archetype.properties");
return loadProperties(propertiesFile);
}
private void invokePostArchetypeGenerationGoals(String goals, File basedir, File goalFile)
throws IntegrationTestFailure, IOException, MojoExecutionException {
FileLogger logger = setupLogger(basedir);
if (!StringUtils.isBlank(goals)) {
getLog().info("Invoking post-archetype-generation goals: " + goals);
if (!localRepositoryPath.exists()) {
localRepositoryPath.mkdirs();
}
// @formatter:off
InvocationRequest request = new DefaultInvocationRequest()
.setBaseDirectory(basedir)
.setGoals(Arrays.asList(StringUtils.split(goals, ",")))
.setLocalRepositoryDirectory(localRepositoryPath)
.setBatchMode(true)
.setShowErrors(true);
// @formatter:on
request.setDebug(debug);
request.setShowVersion(showVersion);
if (logger != null) {
request.setErrorHandler(logger);
request.setOutputHandler(logger);
}
if (!properties.isEmpty()) {
Properties props = new Properties();
for (Map.Entry<String, String> entry : properties.entrySet()) {
if (entry.getValue() != null) {
props.setProperty(entry.getKey(), entry.getValue());
}
}
request.setProperties(props);
}
File archetypeItDirectory = new File(project.getBuild().getDirectory(), "archetype-it");
if (archetypeItDirectory.exists()) {
FileUtils.deleteDirectory(archetypeItDirectory);
}
archetypeItDirectory.mkdir();
File userSettings;
if (settingsFile != null) {
userSettings = new File(archetypeItDirectory, "interpolated-" + settingsFile.getName());
buildInterpolatedFile(settingsFile, userSettings);
} else {
// Use settings coming from the main Maven build
userSettings = new File(archetypeItDirectory, "archetype-settings.xml");
SettingsXpp3Writer settingsWriter = new SettingsXpp3Writer();
try (FileWriter fileWriter = new FileWriter(userSettings)) {
settingsWriter.write(fileWriter, settings);
}
}
request.setUserSettingsFile(userSettings);
try {
InvocationResult result = invoker.execute(request);
getLog().info("Post-archetype-generation invoker exit code: " + result.getExitCode());
if (result.getExitCode() != 0) {
throw new IntegrationTestFailure(
"Execution failure: exit code = " + result.getExitCode(), result.getExecutionException());
}
} catch (MavenInvocationException e) {
throw new IntegrationTestFailure("Cannot run additions goals.", e);
}
} else {
getLog().info("No post-archetype-generation goals to invoke.");
}
// verify result
try (ScriptRunner scriptRunner = new ScriptRunner()) {
scriptRunner.setScriptEncoding(encoding);
Map<String, Object> context = new LinkedHashMap<>();
context.put("projectDir", basedir);
scriptRunner.run("post-build script", goalFile.getParentFile(), postBuildHookScript, context, logger);
} catch (ScriptException e) {
throw new IntegrationTestFailure("post build script failure failure: " + e.getMessage(), e);
}
}
private FileLogger setupLogger(File basedir) throws IOException {
FileLogger logger = null;
if (!noLog) {
File outputLog = new File(basedir, "build.log");
if (streamLogs) {
logger = new FileLogger(outputLog, getLog());
} else {
logger = new FileLogger(outputLog);
}
getLog().debug("build log initialized in: " + outputLog);
}
return logger;
}
    /**
     * Signals the failure of a single archetype integration test. Thrown by the
     * per-IT processing methods and collected (rather than immediately fatal) so
     * that every IT gets a chance to run before the build fails.
     */
    static class IntegrationTestFailure extends Exception {

        IntegrationTestFailure() {
            super();
        }

        IntegrationTestFailure(String message) {
            super(message);
        }

        IntegrationTestFailure(Throwable cause) {
            super(cause);
        }

        IntegrationTestFailure(String message, Throwable cause) {
            super(message, cause);
        }
    }
/**
* Returns the map-based value source used to interpolate settings and other stuff.
*
* @return The map-based value source for interpolation, never <code>null</code>.
*/
private Map<String, Object> getInterpolationValueSource() {
Map<String, Object> props = new HashMap<>();
if (filterProperties != null) {
props.putAll(filterProperties);
}
if (filterProperties != null) {
props.putAll(filterProperties);
}
props.put("basedir", this.project.getBasedir().getAbsolutePath());
props.put("baseurl", toUrl(this.project.getBasedir().getAbsolutePath()));
if (settings.getLocalRepository() != null) {
props.put("localRepository", settings.getLocalRepository());
props.put("localRepositoryUrl", toUrl(settings.getLocalRepository()));
}
return new CompositeMap(this.project, props);
}
protected void buildInterpolatedFile(File originalFile, File interpolatedFile) throws MojoExecutionException {
getLog().debug("Interpolate " + originalFile.getPath() + " to " + interpolatedFile.getPath());
try {
String xml;
// interpolation with token @...@
Map<String, Object> composite = getInterpolationValueSource();
try (Reader xmlStreamReader = new XmlStreamReader(originalFile);
Reader reader = new InterpolationFilterReader(xmlStreamReader, composite, "@", "@")) {
xml = IOUtil.toString(reader);
}
try (Writer writer = new XmlStreamWriter(interpolatedFile)) {
interpolatedFile.getParentFile().mkdirs();
writer.write(xml);
}
} catch (IOException e) {
throw new MojoExecutionException("Failed to interpolate file " + originalFile.getPath(), e);
}
}
private static class CompositeMap implements Map<String, Object> {
/**
* The Maven project from which to extract interpolated values, never <code>null</code>.
*/
private MavenProject mavenProject;
/**
* The set of additional properties from which to extract interpolated values, never <code>null</code>.
*/
private Map<String, Object> properties;
/**
* Creates a new interpolation source backed by the specified Maven project and some user-specified properties.
*
* @param mavenProject The Maven project from which to extract interpolated values, must not be
* <code>null</code>.
* @param properties The set of additional properties from which to extract interpolated values, may be
* <code>null</code>.
*/
protected CompositeMap(MavenProject mavenProject, Map<String, Object> properties) {
if (mavenProject == null) {
throw new IllegalArgumentException("no project specified");
}
this.mavenProject = mavenProject;
this.properties = properties == null ? new HashMap<>() : properties;
}
/**
* {@inheritDoc}
*
* @see java.util.Map#clear()
*/
@Override
public void clear() {
// nothing here
}
/**
* {@inheritDoc}
*
* @see java.util.Map#containsKey(java.lang.Object)
*/
@Override
public boolean containsKey(Object key) {
if (!(key instanceof String)) {
return false;
}
String expression = (String) key;
if (expression.startsWith("project.") || expression.startsWith("pom.")) {
try {
Object evaluated = ReflectionValueExtractor.evaluate(expression, this.mavenProject);
if (evaluated != null) {
return true;
}
} catch (Exception e) {
// uhm do we have to throw a RuntimeException here ?
}
}
return properties.containsKey(key) || mavenProject.getProperties().containsKey(key);
}
/**
* {@inheritDoc}
*
* @see java.util.Map#containsValue(java.lang.Object)
*/
@Override
public boolean containsValue(Object value) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#entrySet()
*/
@Override
public Set<Entry<String, Object>> entrySet() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#get(java.lang.Object)
*/
@Override
public Object get(Object key) {
if (!(key instanceof String)) {
return null;
}
String expression = (String) key;
if (expression.startsWith("project.") || expression.startsWith("pom.")) {
try {
Object evaluated = ReflectionValueExtractor.evaluate(expression, this.mavenProject);
if (evaluated != null) {
return evaluated;
}
} catch (Exception e) {
// uhm do we have to throw a RuntimeException here ?
}
}
Object value = properties.get(key);
return value != null ? value : this.mavenProject.getProperties().get(key);
}
/**
* {@inheritDoc}
*
* @see java.util.Map#isEmpty()
*/
@Override
public boolean isEmpty() {
return this.mavenProject == null
&& this.mavenProject.getProperties().isEmpty()
&& this.properties.isEmpty();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#keySet()
*/
@Override
public Set<String> keySet() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#put(java.lang.Object, java.lang.Object)
*/
@Override
public Object put(String key, Object value) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#putAll(java.util.Map)
*/
@Override
public void putAll(Map<? extends String, ? extends Object> t) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#remove(java.lang.Object)
*/
@Override
public Object remove(Object key) {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#size()
*/
@Override
public int size() {
throw new UnsupportedOperationException();
}
/**
* {@inheritDoc}
*
* @see java.util.Map#values()
*/
@Override
public Collection<Object> values() {
throw new UnsupportedOperationException();
}
}
/**
* Converts the specified filesystem path to a URL. The resulting URL has no trailing slash regardless whether the
* path denotes a file or a directory.
*
* @param filename The filesystem path to convert, must not be <code>null</code>.
* @return The <code>file:</code> URL for the specified path, never <code>null</code>.
*/
private static String toUrl(String filename) {
/*
* NOTE: Maven fails to properly handle percent-encoded "file:" URLs (WAGON-111) so don't use File.toURI() here
* as-is but use the decoded path component in the URL.
*/
String url = "file://" + new File(filename).toURI().getPath();
if (url.endsWith("/")) {
url = url.substring(0, url.length() - 1);
}
return url;
}
}
|
googleapis/google-cloud-java | 36,538 | java-grafeas/src/main/java/io/grafeas/v1/stub/GrafeasStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grafeas.v1.stub;
import static io.grafeas.v1.GrafeasClient.ListNoteOccurrencesPagedResponse;
import static io.grafeas.v1.GrafeasClient.ListNotesPagedResponse;
import static io.grafeas.v1.GrafeasClient.ListOccurrencesPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import io.grafeas.v1.BatchCreateNotesRequest;
import io.grafeas.v1.BatchCreateNotesResponse;
import io.grafeas.v1.BatchCreateOccurrencesRequest;
import io.grafeas.v1.BatchCreateOccurrencesResponse;
import io.grafeas.v1.CreateNoteRequest;
import io.grafeas.v1.CreateOccurrenceRequest;
import io.grafeas.v1.DeleteNoteRequest;
import io.grafeas.v1.DeleteOccurrenceRequest;
import io.grafeas.v1.GetNoteRequest;
import io.grafeas.v1.GetOccurrenceNoteRequest;
import io.grafeas.v1.GetOccurrenceRequest;
import io.grafeas.v1.ListNoteOccurrencesRequest;
import io.grafeas.v1.ListNoteOccurrencesResponse;
import io.grafeas.v1.ListNotesRequest;
import io.grafeas.v1.ListNotesResponse;
import io.grafeas.v1.ListOccurrencesRequest;
import io.grafeas.v1.ListOccurrencesResponse;
import io.grafeas.v1.Note;
import io.grafeas.v1.Occurrence;
import io.grafeas.v1.UpdateNoteRequest;
import io.grafeas.v1.UpdateOccurrenceRequest;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link GrafeasStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getOccurrence:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* GrafeasStubSettings.Builder grafeasSettingsBuilder = GrafeasStubSettings.newBuilder();
* grafeasSettingsBuilder
* .getOccurrenceSettings()
* .setRetrySettings(
* grafeasSettingsBuilder
* .getOccurrenceSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* GrafeasStubSettings grafeasSettings = grafeasSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*/
@Generated("by gapic-generator-java")
public class GrafeasStubSettings extends StubSettings<GrafeasStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().build();

  // Per-RPC call settings, one field per Grafeas method; list methods use
  // PagedCallSettings so callers can iterate pages transparently.
  private final UnaryCallSettings<GetOccurrenceRequest, Occurrence> getOccurrenceSettings;
  private final PagedCallSettings<
          ListOccurrencesRequest, ListOccurrencesResponse, ListOccurrencesPagedResponse>
      listOccurrencesSettings;
  private final UnaryCallSettings<DeleteOccurrenceRequest, Empty> deleteOccurrenceSettings;
  private final UnaryCallSettings<CreateOccurrenceRequest, Occurrence> createOccurrenceSettings;
  private final UnaryCallSettings<BatchCreateOccurrencesRequest, BatchCreateOccurrencesResponse>
      batchCreateOccurrencesSettings;
  private final UnaryCallSettings<UpdateOccurrenceRequest, Occurrence> updateOccurrenceSettings;
  private final UnaryCallSettings<GetOccurrenceNoteRequest, Note> getOccurrenceNoteSettings;
  private final UnaryCallSettings<GetNoteRequest, Note> getNoteSettings;
  private final PagedCallSettings<ListNotesRequest, ListNotesResponse, ListNotesPagedResponse>
      listNotesSettings;
  private final UnaryCallSettings<DeleteNoteRequest, Empty> deleteNoteSettings;
  private final UnaryCallSettings<CreateNoteRequest, Note> createNoteSettings;
  private final UnaryCallSettings<BatchCreateNotesRequest, BatchCreateNotesResponse>
      batchCreateNotesSettings;
  private final UnaryCallSettings<UpdateNoteRequest, Note> updateNoteSettings;
  private final PagedCallSettings<
          ListNoteOccurrencesRequest, ListNoteOccurrencesResponse, ListNoteOccurrencesPagedResponse>
      listNoteOccurrencesSettings;
  // Page-streaming descriptor for ListOccurrences (auto-generated): tells the gax paging
  // machinery how to inject page tokens/sizes into requests and how to extract the next
  // token and the Occurrence resources from responses.
  private static final PagedListDescriptor<
          ListOccurrencesRequest, ListOccurrencesResponse, Occurrence>
      LIST_OCCURRENCES_PAGE_STR_DESC =
          new PagedListDescriptor<ListOccurrencesRequest, ListOccurrencesResponse, Occurrence>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListOccurrencesRequest injectToken(ListOccurrencesRequest payload, String token) {
              return ListOccurrencesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListOccurrencesRequest injectPageSize(
                ListOccurrencesRequest payload, int pageSize) {
              return ListOccurrencesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListOccurrencesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListOccurrencesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Occurrence> extractResources(ListOccurrencesResponse payload) {
              return payload.getOccurrencesList();
            }
          };
  // Page-streaming descriptor for ListNotes (auto-generated); same role as the
  // ListOccurrences descriptor above, but for Note resources.
  private static final PagedListDescriptor<ListNotesRequest, ListNotesResponse, Note>
      LIST_NOTES_PAGE_STR_DESC =
          new PagedListDescriptor<ListNotesRequest, ListNotesResponse, Note>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListNotesRequest injectToken(ListNotesRequest payload, String token) {
              return ListNotesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListNotesRequest injectPageSize(ListNotesRequest payload, int pageSize) {
              return ListNotesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListNotesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListNotesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Note> extractResources(ListNotesResponse payload) {
              return payload.getNotesList();
            }
          };
  // Page-streaming descriptor for ListNoteOccurrences (auto-generated); paginates the
  // Occurrence resources attached to a single Note.
  private static final PagedListDescriptor<
          ListNoteOccurrencesRequest, ListNoteOccurrencesResponse, Occurrence>
      LIST_NOTE_OCCURRENCES_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListNoteOccurrencesRequest, ListNoteOccurrencesResponse, Occurrence>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListNoteOccurrencesRequest injectToken(
                ListNoteOccurrencesRequest payload, String token) {
              return ListNoteOccurrencesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListNoteOccurrencesRequest injectPageSize(
                ListNoteOccurrencesRequest payload, int pageSize) {
              return ListNoteOccurrencesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListNoteOccurrencesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListNoteOccurrencesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Occurrence> extractResources(ListNoteOccurrencesResponse payload) {
              return payload.getOccurrencesList();
            }
          };
  // Factory that wraps a raw ListOccurrences response future into the typed
  // ListOccurrencesPagedResponse, binding it to the page descriptor above.
  private static final PagedListResponseFactory<
          ListOccurrencesRequest, ListOccurrencesResponse, ListOccurrencesPagedResponse>
      LIST_OCCURRENCES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListOccurrencesRequest, ListOccurrencesResponse, ListOccurrencesPagedResponse>() {
            @Override
            public ApiFuture<ListOccurrencesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListOccurrencesRequest, ListOccurrencesResponse> callable,
                ListOccurrencesRequest request,
                ApiCallContext context,
                ApiFuture<ListOccurrencesResponse> futureResponse) {
              PageContext<ListOccurrencesRequest, ListOccurrencesResponse, Occurrence> pageContext =
                  PageContext.create(callable, LIST_OCCURRENCES_PAGE_STR_DESC, request, context);
              return ListOccurrencesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Factory that wraps a raw ListNotes response future into the typed
  // ListNotesPagedResponse, binding it to the ListNotes page descriptor.
  private static final PagedListResponseFactory<
          ListNotesRequest, ListNotesResponse, ListNotesPagedResponse>
      LIST_NOTES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListNotesRequest, ListNotesResponse, ListNotesPagedResponse>() {
            @Override
            public ApiFuture<ListNotesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListNotesRequest, ListNotesResponse> callable,
                ListNotesRequest request,
                ApiCallContext context,
                ApiFuture<ListNotesResponse> futureResponse) {
              PageContext<ListNotesRequest, ListNotesResponse, Note> pageContext =
                  PageContext.create(callable, LIST_NOTES_PAGE_STR_DESC, request, context);
              return ListNotesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // Factory that wraps a raw ListNoteOccurrences response future into the typed
  // ListNoteOccurrencesPagedResponse, binding it to its page descriptor.
  private static final PagedListResponseFactory<
          ListNoteOccurrencesRequest, ListNoteOccurrencesResponse, ListNoteOccurrencesPagedResponse>
      LIST_NOTE_OCCURRENCES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListNoteOccurrencesRequest,
              ListNoteOccurrencesResponse,
              ListNoteOccurrencesPagedResponse>() {
            @Override
            public ApiFuture<ListNoteOccurrencesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListNoteOccurrencesRequest, ListNoteOccurrencesResponse> callable,
                ListNoteOccurrencesRequest request,
                ApiCallContext context,
                ApiFuture<ListNoteOccurrencesResponse> futureResponse) {
              PageContext<ListNoteOccurrencesRequest, ListNoteOccurrencesResponse, Occurrence>
                  pageContext =
                      PageContext.create(
                          callable, LIST_NOTE_OCCURRENCES_PAGE_STR_DESC, request, context);
              return ListNoteOccurrencesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };
  // ---------------------------------------------------------------------------
  // Accessors for the immutable per-RPC call settings held by this stub
  // settings instance. Each field is frozen in the constructor below.
  // ---------------------------------------------------------------------------

  /** Returns the object with the settings used for calls to getOccurrence. */
  public UnaryCallSettings<GetOccurrenceRequest, Occurrence> getOccurrenceSettings() {
    return getOccurrenceSettings;
  }

  /** Returns the object with the settings used for calls to listOccurrences. */
  public PagedCallSettings<
          ListOccurrencesRequest, ListOccurrencesResponse, ListOccurrencesPagedResponse>
      listOccurrencesSettings() {
    return listOccurrencesSettings;
  }

  /** Returns the object with the settings used for calls to deleteOccurrence. */
  public UnaryCallSettings<DeleteOccurrenceRequest, Empty> deleteOccurrenceSettings() {
    return deleteOccurrenceSettings;
  }

  /** Returns the object with the settings used for calls to createOccurrence. */
  public UnaryCallSettings<CreateOccurrenceRequest, Occurrence> createOccurrenceSettings() {
    return createOccurrenceSettings;
  }

  /** Returns the object with the settings used for calls to batchCreateOccurrences. */
  public UnaryCallSettings<BatchCreateOccurrencesRequest, BatchCreateOccurrencesResponse>
      batchCreateOccurrencesSettings() {
    return batchCreateOccurrencesSettings;
  }

  /** Returns the object with the settings used for calls to updateOccurrence. */
  public UnaryCallSettings<UpdateOccurrenceRequest, Occurrence> updateOccurrenceSettings() {
    return updateOccurrenceSettings;
  }

  /** Returns the object with the settings used for calls to getOccurrenceNote. */
  public UnaryCallSettings<GetOccurrenceNoteRequest, Note> getOccurrenceNoteSettings() {
    return getOccurrenceNoteSettings;
  }

  /** Returns the object with the settings used for calls to getNote. */
  public UnaryCallSettings<GetNoteRequest, Note> getNoteSettings() {
    return getNoteSettings;
  }

  /** Returns the object with the settings used for calls to listNotes. */
  public PagedCallSettings<ListNotesRequest, ListNotesResponse, ListNotesPagedResponse>
      listNotesSettings() {
    return listNotesSettings;
  }

  /** Returns the object with the settings used for calls to deleteNote. */
  public UnaryCallSettings<DeleteNoteRequest, Empty> deleteNoteSettings() {
    return deleteNoteSettings;
  }

  /** Returns the object with the settings used for calls to createNote. */
  public UnaryCallSettings<CreateNoteRequest, Note> createNoteSettings() {
    return createNoteSettings;
  }

  /** Returns the object with the settings used for calls to batchCreateNotes. */
  public UnaryCallSettings<BatchCreateNotesRequest, BatchCreateNotesResponse>
      batchCreateNotesSettings() {
    return batchCreateNotesSettings;
  }

  /** Returns the object with the settings used for calls to updateNote. */
  public UnaryCallSettings<UpdateNoteRequest, Note> updateNoteSettings() {
    return updateNoteSettings;
  }

  /** Returns the object with the settings used for calls to listNoteOccurrences. */
  public PagedCallSettings<
          ListNoteOccurrencesRequest, ListNoteOccurrencesResponse, ListNoteOccurrencesPagedResponse>
      listNoteOccurrencesSettings() {
    return listNoteOccurrencesSettings;
  }
public GrafeasStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(GrpcTransportChannel.getGrpcTransportName())) {
return GrpcGrafeasStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
  // ---------------------------------------------------------------------------
  // Static defaults: service identity, endpoints, scopes, and the default
  // executor/credentials/channel/header providers used by createDefault().
  // ---------------------------------------------------------------------------

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "containeranalysis";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    // NOTE(review): returns null rather than a host:port; presumably the
    // endpoint is resolved elsewhere (e.g. from the service name) — confirm
    // against the GAX endpoint-resolution logic before relying on this.
    return null;
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "containeranalysis.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    // Attaches library-version and gRPC-version tokens to outgoing request headers.
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(GrafeasStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Constructs the immutable settings from the given builder, freezing every
   * per-RPC call-settings builder via {@code build()}.
   */
  protected GrafeasStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    getOccurrenceSettings = settingsBuilder.getOccurrenceSettings().build();
    listOccurrencesSettings = settingsBuilder.listOccurrencesSettings().build();
    deleteOccurrenceSettings = settingsBuilder.deleteOccurrenceSettings().build();
    createOccurrenceSettings = settingsBuilder.createOccurrenceSettings().build();
    batchCreateOccurrencesSettings = settingsBuilder.batchCreateOccurrencesSettings().build();
    updateOccurrenceSettings = settingsBuilder.updateOccurrenceSettings().build();
    getOccurrenceNoteSettings = settingsBuilder.getOccurrenceNoteSettings().build();
    getNoteSettings = settingsBuilder.getNoteSettings().build();
    listNotesSettings = settingsBuilder.listNotesSettings().build();
    deleteNoteSettings = settingsBuilder.deleteNoteSettings().build();
    createNoteSettings = settingsBuilder.createNoteSettings().build();
    batchCreateNotesSettings = settingsBuilder.batchCreateNotesSettings().build();
    updateNoteSettings = settingsBuilder.updateNoteSettings().build();
    listNoteOccurrencesSettings = settingsBuilder.listNoteOccurrencesSettings().build();
  }
  /** Builder for GrafeasStubSettings. */
  public static class Builder extends StubSettings.Builder<GrafeasStubSettings, Builder> {
    // Every unary/paged settings builder, collected so applyToAllUnaryMethods
    // can update them in bulk. Must stay in sync with the fields below.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetOccurrenceRequest, Occurrence> getOccurrenceSettings;
    private final PagedCallSettings.Builder<
            ListOccurrencesRequest, ListOccurrencesResponse, ListOccurrencesPagedResponse>
        listOccurrencesSettings;
    private final UnaryCallSettings.Builder<DeleteOccurrenceRequest, Empty>
        deleteOccurrenceSettings;
    private final UnaryCallSettings.Builder<CreateOccurrenceRequest, Occurrence>
        createOccurrenceSettings;
    private final UnaryCallSettings.Builder<
            BatchCreateOccurrencesRequest, BatchCreateOccurrencesResponse>
        batchCreateOccurrencesSettings;
    private final UnaryCallSettings.Builder<UpdateOccurrenceRequest, Occurrence>
        updateOccurrenceSettings;
    private final UnaryCallSettings.Builder<GetOccurrenceNoteRequest, Note>
        getOccurrenceNoteSettings;
    private final UnaryCallSettings.Builder<GetNoteRequest, Note> getNoteSettings;
    private final PagedCallSettings.Builder<
            ListNotesRequest, ListNotesResponse, ListNotesPagedResponse>
        listNotesSettings;
    private final UnaryCallSettings.Builder<DeleteNoteRequest, Empty> deleteNoteSettings;
    private final UnaryCallSettings.Builder<CreateNoteRequest, Note> createNoteSettings;
    private final UnaryCallSettings.Builder<BatchCreateNotesRequest, BatchCreateNotesResponse>
        batchCreateNotesSettings;
    private final UnaryCallSettings.Builder<UpdateNoteRequest, Note> updateNoteSettings;
    private final PagedCallSettings.Builder<
            ListNoteOccurrencesRequest,
            ListNoteOccurrencesResponse,
            ListNoteOccurrencesPagedResponse>
        listNoteOccurrencesSettings;

    // Named sets of gRPC status codes considered retryable, keyed by the
    // retry-policy ids referenced from initDefaults().
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
      definitions.put(
          "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named retry/backoff parameter sets, keyed by the retry-policy ids
    // referenced from initDefaults().
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      // retry_policy_0: exponential backoff 100ms..60s (x1.3), 30s RPC and total timeout.
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setTotalTimeoutDuration(Duration.ofMillis(30000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      // no_retry_1: a single attempt with a 30s timeout (no backoff fields set).
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setTotalTimeoutDuration(Duration.ofMillis(30000L))
              .build();
      definitions.put("no_retry_1_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    // Creates a fresh settings builder for every RPC and applies the defaults.
    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getOccurrenceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listOccurrencesSettings = PagedCallSettings.newBuilder(LIST_OCCURRENCES_PAGE_STR_FACT);
      deleteOccurrenceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createOccurrenceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchCreateOccurrencesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateOccurrenceSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getOccurrenceNoteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getNoteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listNotesSettings = PagedCallSettings.newBuilder(LIST_NOTES_PAGE_STR_FACT);
      deleteNoteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createNoteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchCreateNotesSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateNoteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listNoteOccurrencesSettings =
          PagedCallSettings.newBuilder(LIST_NOTE_OCCURRENCES_PAGE_STR_FACT);

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getOccurrenceSettings,
              listOccurrencesSettings,
              deleteOccurrenceSettings,
              createOccurrenceSettings,
              batchCreateOccurrencesSettings,
              updateOccurrenceSettings,
              getOccurrenceNoteSettings,
              getNoteSettings,
              listNotesSettings,
              deleteNoteSettings,
              createNoteSettings,
              batchCreateNotesSettings,
              updateNoteSettings,
              listNoteOccurrencesSettings);
      initDefaults(this);
    }

    // Copy constructor: seeds each settings builder from an existing immutable
    // settings instance (used by GrafeasStubSettings.toBuilder()).
    protected Builder(GrafeasStubSettings settings) {
      super(settings);

      getOccurrenceSettings = settings.getOccurrenceSettings.toBuilder();
      listOccurrencesSettings = settings.listOccurrencesSettings.toBuilder();
      deleteOccurrenceSettings = settings.deleteOccurrenceSettings.toBuilder();
      createOccurrenceSettings = settings.createOccurrenceSettings.toBuilder();
      batchCreateOccurrencesSettings = settings.batchCreateOccurrencesSettings.toBuilder();
      updateOccurrenceSettings = settings.updateOccurrenceSettings.toBuilder();
      getOccurrenceNoteSettings = settings.getOccurrenceNoteSettings.toBuilder();
      getNoteSettings = settings.getNoteSettings.toBuilder();
      listNotesSettings = settings.listNotesSettings.toBuilder();
      deleteNoteSettings = settings.deleteNoteSettings.toBuilder();
      createNoteSettings = settings.createNoteSettings.toBuilder();
      batchCreateNotesSettings = settings.batchCreateNotesSettings.toBuilder();
      updateNoteSettings = settings.updateNoteSettings.toBuilder();
      listNoteOccurrencesSettings = settings.listNoteOccurrencesSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getOccurrenceSettings,
              listOccurrencesSettings,
              deleteOccurrenceSettings,
              createOccurrenceSettings,
              batchCreateOccurrencesSettings,
              updateOccurrenceSettings,
              getOccurrenceNoteSettings,
              getNoteSettings,
              listNotesSettings,
              deleteNoteSettings,
              createNoteSettings,
              batchCreateNotesSettings,
              updateNoteSettings,
              listNoteOccurrencesSettings);
    }

    // Builds a Builder wired with the default transport, credentials, headers,
    // and mTLS configuration for this service.
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the per-RPC retryable-code sets and retry parameters from the
    // static tables above. Reads/list RPCs retry (retry_policy_0); mutations
    // other than delete do not (no_retry_1).
    private static Builder initDefaults(Builder builder) {
      builder
          .getOccurrenceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listOccurrencesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deleteOccurrenceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createOccurrenceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .batchCreateOccurrencesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .updateOccurrenceSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .getOccurrenceNoteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getNoteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listNotesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deleteNoteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .createNoteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .batchCreateNotesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .updateNoteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .listNoteOccurrencesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to getOccurrence. */
    public UnaryCallSettings.Builder<GetOccurrenceRequest, Occurrence> getOccurrenceSettings() {
      return getOccurrenceSettings;
    }

    /** Returns the builder for the settings used for calls to listOccurrences. */
    public PagedCallSettings.Builder<
            ListOccurrencesRequest, ListOccurrencesResponse, ListOccurrencesPagedResponse>
        listOccurrencesSettings() {
      return listOccurrencesSettings;
    }

    /** Returns the builder for the settings used for calls to deleteOccurrence. */
    public UnaryCallSettings.Builder<DeleteOccurrenceRequest, Empty> deleteOccurrenceSettings() {
      return deleteOccurrenceSettings;
    }

    /** Returns the builder for the settings used for calls to createOccurrence. */
    public UnaryCallSettings.Builder<CreateOccurrenceRequest, Occurrence>
        createOccurrenceSettings() {
      return createOccurrenceSettings;
    }

    /** Returns the builder for the settings used for calls to batchCreateOccurrences. */
    public UnaryCallSettings.Builder<BatchCreateOccurrencesRequest, BatchCreateOccurrencesResponse>
        batchCreateOccurrencesSettings() {
      return batchCreateOccurrencesSettings;
    }

    /** Returns the builder for the settings used for calls to updateOccurrence. */
    public UnaryCallSettings.Builder<UpdateOccurrenceRequest, Occurrence>
        updateOccurrenceSettings() {
      return updateOccurrenceSettings;
    }

    /** Returns the builder for the settings used for calls to getOccurrenceNote. */
    public UnaryCallSettings.Builder<GetOccurrenceNoteRequest, Note> getOccurrenceNoteSettings() {
      return getOccurrenceNoteSettings;
    }

    /** Returns the builder for the settings used for calls to getNote. */
    public UnaryCallSettings.Builder<GetNoteRequest, Note> getNoteSettings() {
      return getNoteSettings;
    }

    /** Returns the builder for the settings used for calls to listNotes. */
    public PagedCallSettings.Builder<ListNotesRequest, ListNotesResponse, ListNotesPagedResponse>
        listNotesSettings() {
      return listNotesSettings;
    }

    /** Returns the builder for the settings used for calls to deleteNote. */
    public UnaryCallSettings.Builder<DeleteNoteRequest, Empty> deleteNoteSettings() {
      return deleteNoteSettings;
    }

    /** Returns the builder for the settings used for calls to createNote. */
    public UnaryCallSettings.Builder<CreateNoteRequest, Note> createNoteSettings() {
      return createNoteSettings;
    }

    /** Returns the builder for the settings used for calls to batchCreateNotes. */
    public UnaryCallSettings.Builder<BatchCreateNotesRequest, BatchCreateNotesResponse>
        batchCreateNotesSettings() {
      return batchCreateNotesSettings;
    }

    /** Returns the builder for the settings used for calls to updateNote. */
    public UnaryCallSettings.Builder<UpdateNoteRequest, Note> updateNoteSettings() {
      return updateNoteSettings;
    }

    /** Returns the builder for the settings used for calls to listNoteOccurrences. */
    public PagedCallSettings.Builder<
            ListNoteOccurrencesRequest,
            ListNoteOccurrencesResponse,
            ListNoteOccurrencesPagedResponse>
        listNoteOccurrencesSettings() {
      return listNoteOccurrencesSettings;
    }

    @Override
    public GrafeasStubSettings build() throws IOException {
      return new GrafeasStubSettings(this);
    }
  }
}
|
googleapis/google-cloud-java | 36,208 | java-servicedirectory/proto-google-cloud-servicedirectory-v1/src/main/java/com/google/cloud/servicedirectory/v1/ListServicesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/servicedirectory/v1/registration_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.servicedirectory.v1;
/**
*
*
* <pre>
* The response message for
* [RegistrationService.ListServices][google.cloud.servicedirectory.v1.RegistrationService.ListServices].
* </pre>
*
* Protobuf type {@code google.cloud.servicedirectory.v1.ListServicesResponse}
*/
public final class ListServicesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.servicedirectory.v1.ListServicesResponse)
ListServicesResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListServicesResponse.newBuilder() to construct.
  private ListServicesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance: empty repeated field and
  // empty page token.
  private ListServicesResponse() {
    services_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Called reflectively by the protobuf runtime to allocate new instances.
    return new ListServicesResponse();
  }
  /** Returns the proto descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.servicedirectory.v1.RegistrationServiceProto
        .internal_static_google_cloud_servicedirectory_v1_ListServicesResponse_descriptor;
  }

  // Maps proto field names to the generated accessors for reflection support.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.servicedirectory.v1.RegistrationServiceProto
        .internal_static_google_cloud_servicedirectory_v1_ListServicesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.servicedirectory.v1.ListServicesResponse.class,
            com.google.cloud.servicedirectory.v1.ListServicesResponse.Builder.class);
  }
  public static final int SERVICES_FIELD_NUMBER = 1;

  // Backing storage for the repeated `services` field (field number 1).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.servicedirectory.v1.Service> services_;

  /**
   *
   *
   * <pre>
   * The list of services.
   * </pre>
   *
   * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.servicedirectory.v1.Service> getServicesList() {
    return services_;
  }

  /**
   *
   *
   * <pre>
   * The list of services.
   * </pre>
   *
   * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.servicedirectory.v1.ServiceOrBuilder>
      getServicesOrBuilderList() {
    return services_;
  }

  /**
   *
   *
   * <pre>
   * The list of services.
   * </pre>
   *
   * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
   */
  @java.lang.Override
  public int getServicesCount() {
    return services_.size();
  }

  /**
   *
   *
   * <pre>
   * The list of services.
   * </pre>
   *
   * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.servicedirectory.v1.Service getServices(int index) {
    return services_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The list of services.
   * </pre>
   *
   * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.servicedirectory.v1.ServiceOrBuilder getServicesOrBuilder(int index) {
    return services_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Holds either a java.lang.String or a ByteString; each accessor lazily
  // converts to its preferred representation and caches it back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no
   * more results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access after parsing: decode the ByteString once and
      // cache the decoded String for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no
   * more results in the list.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  // Serializes this message to the wire in field-number order.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: repeated Service messages.
    for (int i = 0; i < services_.size(); i++) {
      output.writeMessage(1, services_.get(i));
    }
    // Field 2: next_page_token; proto3 omits empty strings from the wire.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    // Preserve any fields this binary did not know about when parsing.
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes) the exact serialized byte size; must mirror writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // -1 means "not yet computed"

    size = 0;
    for (int i = 0; i < services_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, services_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.servicedirectory.v1.ListServicesResponse)) {
return super.equals(obj);
}
com.google.cloud.servicedirectory.v1.ListServicesResponse other =
(com.google.cloud.servicedirectory.v1.ListServicesResponse) obj;
if (!getServicesList().equals(other.getServicesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Hash over descriptor, populated fields, and unknown fields; memoized
  // because the message is immutable. Must stay consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field only contributes when non-empty (mirrors wire presence).
    if (getServicesCount() > 0) {
      hash = (37 * hash) + SERVICES_FIELD_NUMBER;
      hash = (53 * hash) + getServicesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite. All delegate to PARSER.
  // ---------------------------------------------------------------------------

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.servicedirectory.v1.ListServicesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
// ---------------------------------------------------------------------------
// Builder factory methods (generated). newBuilder() starts from the immutable
// DEFAULT_INSTANCE; newBuilder(prototype) pre-populates the builder with a
// copy of an existing message's fields.
// ---------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.servicedirectory.v1.ListServicesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoids a needless mergeFrom() when this message is the (all-default) instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * The response message for
 * [RegistrationService.ListServices][google.cloud.servicedirectory.v1.RegistrationService.ListServices].
 * </pre>
 *
 * Protobuf type {@code google.cloud.servicedirectory.v1.ListServicesResponse}
 */
// Generated mutable builder. Internal state tracking via bitField0_:
//   0x00000001 - services_ list is locally owned/mutable (vs. shared/immutable)
//   0x00000002 - next_page_token has been explicitly set
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.servicedirectory.v1.ListServicesResponse)
com.google.cloud.servicedirectory.v1.ListServicesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.servicedirectory.v1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1_ListServicesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.servicedirectory.v1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1_ListServicesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.servicedirectory.v1.ListServicesResponse.class,
com.google.cloud.servicedirectory.v1.ListServicesResponse.Builder.class);
}
// Construct using com.google.cloud.servicedirectory.v1.ListServicesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
// With no field builder, drop back to the shared empty list; otherwise
// clear through the builder, which owns the list.
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
} else {
services_ = null;
servicesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.servicedirectory.v1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1_ListServicesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.ListServicesResponse getDefaultInstanceForType() {
return com.google.cloud.servicedirectory.v1.ListServicesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.ListServicesResponse build() {
com.google.cloud.servicedirectory.v1.ListServicesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.ListServicesResponse buildPartial() {
com.google.cloud.servicedirectory.v1.ListServicesResponse result =
new com.google.cloud.servicedirectory.v1.ListServicesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated services field into the result, freezing the local
// list (unmodifiableList) if this builder still owns it.
private void buildPartialRepeatedFields(
com.google.cloud.servicedirectory.v1.ListServicesResponse result) {
if (servicesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
services_ = java.util.Collections.unmodifiableList(services_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.services_ = services_;
} else {
result.services_ = servicesBuilder_.build();
}
}
// Copies the scalar fields guarded by bitField0_ into the result.
private void buildPartial0(com.google.cloud.servicedirectory.v1.ListServicesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.servicedirectory.v1.ListServicesResponse) {
return mergeFrom((com.google.cloud.servicedirectory.v1.ListServicesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge. Repeated services are appended; if this builder's
// list is still empty it adopts the other message's (immutable) list
// directly to avoid a copy.
public Builder mergeFrom(com.google.cloud.servicedirectory.v1.ListServicesResponse other) {
if (other == com.google.cloud.servicedirectory.v1.ListServicesResponse.getDefaultInstance())
return this;
if (servicesBuilder_ == null) {
if (!other.services_.isEmpty()) {
if (services_.isEmpty()) {
services_ = other.services_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureServicesIsMutable();
services_.addAll(other.services_);
}
onChanged();
}
} else {
if (!other.services_.isEmpty()) {
if (servicesBuilder_.isEmpty()) {
// Adopt the other list wholesale, re-creating the field builder
// lazily (or eagerly when alwaysUseFieldBuilders is set).
servicesBuilder_.dispose();
servicesBuilder_ = null;
services_ = other.services_;
bitField0_ = (bitField0_ & ~0x00000001);
servicesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getServicesFieldBuilder()
: null;
} else {
servicesBuilder_.addAllMessages(other.services_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No required fields in this message, so it is always initialized.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
// Wire-format parse loop; tag = (field_number << 3) | wire_type.
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
// services = 1 (length-delimited message)
com.google.cloud.servicedirectory.v1.Service m =
input.readMessage(
com.google.cloud.servicedirectory.v1.Service.parser(), extensionRegistry);
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(m);
} else {
servicesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
// next_page_token = 2 (string)
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.servicedirectory.v1.Service> services_ =
java.util.Collections.emptyList();
// Copy-on-write: replace a shared/immutable list with a locally owned
// ArrayList before the first mutation.
private void ensureServicesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
services_ =
new java.util.ArrayList<com.google.cloud.servicedirectory.v1.Service>(services_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.servicedirectory.v1.Service,
com.google.cloud.servicedirectory.v1.Service.Builder,
com.google.cloud.servicedirectory.v1.ServiceOrBuilder>
servicesBuilder_;
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public java.util.List<com.google.cloud.servicedirectory.v1.Service> getServicesList() {
if (servicesBuilder_ == null) {
return java.util.Collections.unmodifiableList(services_);
} else {
return servicesBuilder_.getMessageList();
}
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public int getServicesCount() {
if (servicesBuilder_ == null) {
return services_.size();
} else {
return servicesBuilder_.getCount();
}
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public com.google.cloud.servicedirectory.v1.Service getServices(int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessage(index);
}
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder setServices(int index, com.google.cloud.servicedirectory.v1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.set(index, value);
onChanged();
} else {
servicesBuilder_.setMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder setServices(
int index, com.google.cloud.servicedirectory.v1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.set(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder addServices(com.google.cloud.servicedirectory.v1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(value);
onChanged();
} else {
servicesBuilder_.addMessage(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder addServices(int index, com.google.cloud.servicedirectory.v1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(index, value);
onChanged();
} else {
servicesBuilder_.addMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder addServices(
com.google.cloud.servicedirectory.v1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder addServices(
int index, com.google.cloud.servicedirectory.v1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder addAllServices(
java.lang.Iterable<? extends com.google.cloud.servicedirectory.v1.Service> values) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, services_);
onChanged();
} else {
servicesBuilder_.addAllMessages(values);
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder clearServices() {
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
servicesBuilder_.clear();
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public Builder removeServices(int index) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.remove(index);
onChanged();
} else {
servicesBuilder_.remove(index);
}
return this;
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public com.google.cloud.servicedirectory.v1.Service.Builder getServicesBuilder(int index) {
return getServicesFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public com.google.cloud.servicedirectory.v1.ServiceOrBuilder getServicesOrBuilder(int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessageOrBuilder(index);
}
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public java.util.List<? extends com.google.cloud.servicedirectory.v1.ServiceOrBuilder>
getServicesOrBuilderList() {
if (servicesBuilder_ != null) {
return servicesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(services_);
}
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public com.google.cloud.servicedirectory.v1.Service.Builder addServicesBuilder() {
return getServicesFieldBuilder()
.addBuilder(com.google.cloud.servicedirectory.v1.Service.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public com.google.cloud.servicedirectory.v1.Service.Builder addServicesBuilder(int index) {
return getServicesFieldBuilder()
.addBuilder(index, com.google.cloud.servicedirectory.v1.Service.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The list of services.
 * </pre>
 *
 * <code>repeated .google.cloud.servicedirectory.v1.Service services = 1;</code>
 */
public java.util.List<com.google.cloud.servicedirectory.v1.Service.Builder>
getServicesBuilderList() {
return getServicesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3; once created, it takes
// ownership of the list and services_ is nulled out.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.servicedirectory.v1.Service,
com.google.cloud.servicedirectory.v1.Service.Builder,
com.google.cloud.servicedirectory.v1.ServiceOrBuilder>
getServicesFieldBuilder() {
if (servicesBuilder_ == null) {
servicesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.servicedirectory.v1.Service,
com.google.cloud.servicedirectory.v1.Service.Builder,
com.google.cloud.servicedirectory.v1.ServiceOrBuilder>(
services_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
services_ = null;
}
return servicesBuilder_;
}
// Stored as Object: either a String or a ByteString, converted lazily.
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.servicedirectory.v1.ListServicesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.servicedirectory.v1.ListServicesResponse)
// Singleton default (all-fields-default) instance, created eagerly at class
// initialization; shared by getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.servicedirectory.v1.ListServicesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.servicedirectory.v1.ListServicesResponse();
}
public static com.google.cloud.servicedirectory.v1.ListServicesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser: delegates to the Builder's mergeFrom and rewraps parse
// failures with the partially built message attached for diagnostics.
private static final com.google.protobuf.Parser<ListServicesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListServicesResponse>() {
@java.lang.Override
public ListServicesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListServicesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListServicesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1.ListServicesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
/**
 *
 *
 * <pre>
 * ProtectConfig defines the flags needed to enable/disable features for the
 * Protect API.
 * </pre>
 *
 * Protobuf type {@code google.container.v1beta1.ProtectConfig}
 */
public final class ProtectConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1beta1.ProtectConfig)
ProtectConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use ProtectConfig.newBuilder() to construct.
private ProtectConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializes scalar fields to their proto3 defaults.
private ProtectConfig() {
workloadVulnerabilityMode_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ProtectConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_ProtectConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_ProtectConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.container.v1beta1.ProtectConfig.class,
com.google.container.v1beta1.ProtectConfig.Builder.class);
}
/**
 *
 *
 * <pre>
 * WorkloadVulnerabilityMode defines mode to perform vulnerability scanning.
 * </pre>
 *
 * Protobuf enum {@code google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode}
 */
public enum WorkloadVulnerabilityMode implements com.google.protobuf.ProtocolMessageEnum {
/**
 *
 *
 * <pre>
 * Default value not specified.
 * </pre>
 *
 * <code>WORKLOAD_VULNERABILITY_MODE_UNSPECIFIED = 0;</code>
 */
WORKLOAD_VULNERABILITY_MODE_UNSPECIFIED(0),
/**
 *
 *
 * <pre>
 * Disables Workload Vulnerability Scanning feature on the cluster.
 * </pre>
 *
 * <code>DISABLED = 1;</code>
 */
DISABLED(1),
/**
 *
 *
 * <pre>
 * Applies basic vulnerability scanning settings for cluster workloads.
 * </pre>
 *
 * <code>BASIC = 2;</code>
 */
BASIC(2),
// Sentinel for wire values not known to this generated code version.
UNRECOGNIZED(-1),
;
/**
 *
 *
 * <pre>
 * Default value not specified.
 * </pre>
 *
 * <code>WORKLOAD_VULNERABILITY_MODE_UNSPECIFIED = 0;</code>
 */
public static final int WORKLOAD_VULNERABILITY_MODE_UNSPECIFIED_VALUE = 0;
/**
 *
 *
 * <pre>
 * Disables Workload Vulnerability Scanning feature on the cluster.
 * </pre>
 *
 * <code>DISABLED = 1;</code>
 */
public static final int DISABLED_VALUE = 1;
/**
 *
 *
 * <pre>
 * Applies basic vulnerability scanning settings for cluster workloads.
 * </pre>
 *
 * <code>BASIC = 2;</code>
 */
public static final int BASIC_VALUE = 2;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static WorkloadVulnerabilityMode valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or null if unknown.
 */
public static WorkloadVulnerabilityMode forNumber(int value) {
switch (value) {
case 0:
return WORKLOAD_VULNERABILITY_MODE_UNSPECIFIED;
case 1:
return DISABLED;
case 2:
return BASIC;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<WorkloadVulnerabilityMode>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<WorkloadVulnerabilityMode>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<WorkloadVulnerabilityMode>() {
public WorkloadVulnerabilityMode findValueByNumber(int number) {
return WorkloadVulnerabilityMode.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.container.v1beta1.ProtectConfig.getDescriptor().getEnumTypes().get(0);
}
// Cached copy of values(); values() allocates a fresh array each call.
private static final WorkloadVulnerabilityMode[] VALUES = values();
public static WorkloadVulnerabilityMode valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private WorkloadVulnerabilityMode(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode)
}
// Presence bits for the optional fields:
//   0x00000001 - workload_config set, 0x00000002 - workload_vulnerability_mode set
private int bitField0_;
public static final int WORKLOAD_CONFIG_FIELD_NUMBER = 1;
private com.google.container.v1beta1.WorkloadConfig workloadConfig_;
/**
 *
 *
 * <pre>
 * WorkloadConfig defines which actions are enabled for a cluster's workload
 * configurations.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
 *
 * @return Whether the workloadConfig field is set.
 */
@java.lang.Override
public boolean hasWorkloadConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * WorkloadConfig defines which actions are enabled for a cluster's workload
 * configurations.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
 *
 * @return The workloadConfig, or the default instance when unset (never null).
 */
@java.lang.Override
public com.google.container.v1beta1.WorkloadConfig getWorkloadConfig() {
return workloadConfig_ == null
? com.google.container.v1beta1.WorkloadConfig.getDefaultInstance()
: workloadConfig_;
}
/**
 *
 *
 * <pre>
 * WorkloadConfig defines which actions are enabled for a cluster's workload
 * configurations.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
 */
@java.lang.Override
public com.google.container.v1beta1.WorkloadConfigOrBuilder getWorkloadConfigOrBuilder() {
return workloadConfig_ == null
? com.google.container.v1beta1.WorkloadConfig.getDefaultInstance()
: workloadConfig_;
}
public static final int WORKLOAD_VULNERABILITY_MODE_FIELD_NUMBER = 2;
// Stored as the raw wire value so unknown enum numbers round-trip safely.
private int workloadVulnerabilityMode_ = 0;
/**
 *
 *
 * <pre>
 * Sets which mode to use for Protect workload vulnerability scanning feature.
 * </pre>
 *
 * <code>
 * optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
 * </code>
 *
 * @return Whether the workloadVulnerabilityMode field is set.
 */
@java.lang.Override
public boolean hasWorkloadVulnerabilityMode() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Sets which mode to use for Protect workload vulnerability scanning feature.
 * </pre>
 *
 * <code>
 * optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
 * </code>
 *
 * @return The enum numeric value on the wire for workloadVulnerabilityMode.
 */
@java.lang.Override
public int getWorkloadVulnerabilityModeValue() {
return workloadVulnerabilityMode_;
}
/**
 *
 *
 * <pre>
 * Sets which mode to use for Protect workload vulnerability scanning feature.
 * </pre>
 *
 * <code>
 * optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
 * </code>
 *
 * @return The workloadVulnerabilityMode, or UNRECOGNIZED for unknown wire values.
 */
@java.lang.Override
public com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode
getWorkloadVulnerabilityMode() {
com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode result =
com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode.forNumber(
workloadVulnerabilityMode_);
return result == null
? com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode.UNRECOGNIZED
: result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getWorkloadConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeEnum(2, workloadVulnerabilityMode_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getWorkloadConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, workloadVulnerabilityMode_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.container.v1beta1.ProtectConfig)) {
return super.equals(obj);
}
com.google.container.v1beta1.ProtectConfig other =
(com.google.container.v1beta1.ProtectConfig) obj;
if (hasWorkloadConfig() != other.hasWorkloadConfig()) return false;
if (hasWorkloadConfig()) {
if (!getWorkloadConfig().equals(other.getWorkloadConfig())) return false;
}
if (hasWorkloadVulnerabilityMode() != other.hasWorkloadVulnerabilityMode()) return false;
if (hasWorkloadVulnerabilityMode()) {
if (workloadVulnerabilityMode_ != other.workloadVulnerabilityMode_) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  /**
   * Hash consistent with {@link #equals}: mixes the descriptor hash, each present field's number
   * and value, and the unknown fields, using the generator's fixed primes (41/19/37/53/29).
   * Memoized in {@code memoizedHashCode} (0 = not yet computed).
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasWorkloadConfig()) {
      hash = (37 * hash) + WORKLOAD_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getWorkloadConfig().hashCode();
    }
    if (hasWorkloadVulnerabilityMode()) {
      hash = (37 * hash) + WORKLOAD_VULNERABILITY_MODE_FIELD_NUMBER;
      hash = (53 * hash) + workloadVulnerabilityMode_;
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Generated parseFrom / parseDelimitedFrom overloads. Byte-array/ByteString/ByteBuffer variants
  // delegate to PARSER directly; stream variants go through GeneratedMessageV3 helpers so that
  // java.io.IOException is surfaced instead of being wrapped.
  public static com.google.container.v1beta1.ProtectConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1beta1.ProtectConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.container.v1beta1.ProtectConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1beta1.ProtectConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a fresh builder initialized from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.container.v1beta1.ProtectConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a redundant mergeFrom when this is already the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* ProtectConfig defines the flags needed to enable/disable features for the
* Protect API.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.ProtectConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.container.v1beta1.ProtectConfig)
com.google.container.v1beta1.ProtectConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_ProtectConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_ProtectConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.container.v1beta1.ProtectConfig.class,
com.google.container.v1beta1.ProtectConfig.Builder.class);
}
// Construct using com.google.container.v1beta1.ProtectConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getWorkloadConfigFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
workloadConfig_ = null;
if (workloadConfigBuilder_ != null) {
workloadConfigBuilder_.dispose();
workloadConfigBuilder_ = null;
}
workloadVulnerabilityMode_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_ProtectConfig_descriptor;
}
@java.lang.Override
public com.google.container.v1beta1.ProtectConfig getDefaultInstanceForType() {
return com.google.container.v1beta1.ProtectConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.container.v1beta1.ProtectConfig build() {
com.google.container.v1beta1.ProtectConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.container.v1beta1.ProtectConfig buildPartial() {
com.google.container.v1beta1.ProtectConfig result =
new com.google.container.v1beta1.ProtectConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.container.v1beta1.ProtectConfig result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.workloadConfig_ =
workloadConfigBuilder_ == null ? workloadConfig_ : workloadConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.workloadVulnerabilityMode_ = workloadVulnerabilityMode_;
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.container.v1beta1.ProtectConfig) {
return mergeFrom((com.google.container.v1beta1.ProtectConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.container.v1beta1.ProtectConfig other) {
if (other == com.google.container.v1beta1.ProtectConfig.getDefaultInstance()) return this;
if (other.hasWorkloadConfig()) {
mergeWorkloadConfig(other.getWorkloadConfig());
}
if (other.hasWorkloadVulnerabilityMode()) {
setWorkloadVulnerabilityMode(other.getWorkloadVulnerabilityMode());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getWorkloadConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
workloadVulnerabilityMode_ = input.readEnum();
bitField0_ |= 0x00000002;
break;
} // case 16
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.container.v1beta1.WorkloadConfig workloadConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.container.v1beta1.WorkloadConfig,
com.google.container.v1beta1.WorkloadConfig.Builder,
com.google.container.v1beta1.WorkloadConfigOrBuilder>
workloadConfigBuilder_;
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*
* @return Whether the workloadConfig field is set.
*/
public boolean hasWorkloadConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*
* @return The workloadConfig.
*/
public com.google.container.v1beta1.WorkloadConfig getWorkloadConfig() {
if (workloadConfigBuilder_ == null) {
return workloadConfig_ == null
? com.google.container.v1beta1.WorkloadConfig.getDefaultInstance()
: workloadConfig_;
} else {
return workloadConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*/
public Builder setWorkloadConfig(com.google.container.v1beta1.WorkloadConfig value) {
if (workloadConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
workloadConfig_ = value;
} else {
workloadConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*/
public Builder setWorkloadConfig(
com.google.container.v1beta1.WorkloadConfig.Builder builderForValue) {
if (workloadConfigBuilder_ == null) {
workloadConfig_ = builderForValue.build();
} else {
workloadConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*/
public Builder mergeWorkloadConfig(com.google.container.v1beta1.WorkloadConfig value) {
if (workloadConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& workloadConfig_ != null
&& workloadConfig_
!= com.google.container.v1beta1.WorkloadConfig.getDefaultInstance()) {
getWorkloadConfigBuilder().mergeFrom(value);
} else {
workloadConfig_ = value;
}
} else {
workloadConfigBuilder_.mergeFrom(value);
}
if (workloadConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*/
public Builder clearWorkloadConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
workloadConfig_ = null;
if (workloadConfigBuilder_ != null) {
workloadConfigBuilder_.dispose();
workloadConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*/
public com.google.container.v1beta1.WorkloadConfig.Builder getWorkloadConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getWorkloadConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*/
public com.google.container.v1beta1.WorkloadConfigOrBuilder getWorkloadConfigOrBuilder() {
if (workloadConfigBuilder_ != null) {
return workloadConfigBuilder_.getMessageOrBuilder();
} else {
return workloadConfig_ == null
? com.google.container.v1beta1.WorkloadConfig.getDefaultInstance()
: workloadConfig_;
}
}
/**
*
*
* <pre>
* WorkloadConfig defines which actions are enabled for a cluster's workload
* configurations.
* </pre>
*
* <code>optional .google.container.v1beta1.WorkloadConfig workload_config = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.container.v1beta1.WorkloadConfig,
com.google.container.v1beta1.WorkloadConfig.Builder,
com.google.container.v1beta1.WorkloadConfigOrBuilder>
getWorkloadConfigFieldBuilder() {
if (workloadConfigBuilder_ == null) {
workloadConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.container.v1beta1.WorkloadConfig,
com.google.container.v1beta1.WorkloadConfig.Builder,
com.google.container.v1beta1.WorkloadConfigOrBuilder>(
getWorkloadConfig(), getParentForChildren(), isClean());
workloadConfig_ = null;
}
return workloadConfigBuilder_;
}
private int workloadVulnerabilityMode_ = 0;
/**
*
*
* <pre>
* Sets which mode to use for Protect workload vulnerability scanning feature.
* </pre>
*
* <code>
* optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
* </code>
*
* @return Whether the workloadVulnerabilityMode field is set.
*/
@java.lang.Override
public boolean hasWorkloadVulnerabilityMode() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Sets which mode to use for Protect workload vulnerability scanning feature.
* </pre>
*
* <code>
* optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
* </code>
*
* @return The enum numeric value on the wire for workloadVulnerabilityMode.
*/
@java.lang.Override
public int getWorkloadVulnerabilityModeValue() {
return workloadVulnerabilityMode_;
}
/**
*
*
* <pre>
* Sets which mode to use for Protect workload vulnerability scanning feature.
* </pre>
*
* <code>
* optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
* </code>
*
* @param value The enum numeric value on the wire for workloadVulnerabilityMode to set.
* @return This builder for chaining.
*/
public Builder setWorkloadVulnerabilityModeValue(int value) {
workloadVulnerabilityMode_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Sets which mode to use for Protect workload vulnerability scanning feature.
* </pre>
*
* <code>
* optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
* </code>
*
* @return The workloadVulnerabilityMode.
*/
@java.lang.Override
public com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode
getWorkloadVulnerabilityMode() {
com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode result =
com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode.forNumber(
workloadVulnerabilityMode_);
return result == null
? com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* Sets which mode to use for Protect workload vulnerability scanning feature.
* </pre>
*
* <code>
* optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
* </code>
*
* @param value The workloadVulnerabilityMode to set.
* @return This builder for chaining.
*/
public Builder setWorkloadVulnerabilityMode(
com.google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
workloadVulnerabilityMode_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Sets which mode to use for Protect workload vulnerability scanning feature.
* </pre>
*
* <code>
* optional .google.container.v1beta1.ProtectConfig.WorkloadVulnerabilityMode workload_vulnerability_mode = 2;
* </code>
*
* @return This builder for chaining.
*/
public Builder clearWorkloadVulnerabilityMode() {
bitField0_ = (bitField0_ & ~0x00000002);
workloadVulnerabilityMode_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.container.v1beta1.ProtectConfig)
}
  // @@protoc_insertion_point(class_scope:google.container.v1beta1.ProtectConfig)
  // Singleton default instance shared by all empty ProtectConfig values.
  private static final com.google.container.v1beta1.ProtectConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.container.v1beta1.ProtectConfig();
  }

  public static com.google.container.v1beta1.ProtectConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser that builds via the Builder; on failure it attaches the partially built message to
  // the thrown InvalidProtocolBufferException so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<ProtectConfig> PARSER =
      new com.google.protobuf.AbstractParser<ProtectConfig>() {
        @java.lang.Override
        public ProtectConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ProtectConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ProtectConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.container.v1beta1.ProtectConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/iotdb | 36,087 | integration-test/src/test/java/org/apache/iotdb/db/it/schema/IoTDBMetadataFetchIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.it.schema;
import org.apache.iotdb.it.env.EnvFactory;
import org.apache.iotdb.itbase.category.ClusterIT;
import org.apache.iotdb.itbase.category.LocalStandaloneIT;
import org.apache.iotdb.util.AbstractSchemaIT;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.Parameterized;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
* Notice that, all test begins with "IoTDB" is integration test. All test which will start the
* IoTDB server should be defined as integration test.
*/
@Category({LocalStandaloneIT.class, ClusterIT.class})
public class IoTDBMetadataFetchIT extends AbstractSchemaIT {
  /** Runs the whole suite once per schema test mode supplied by the parameterized runner. */
  public IoTDBMetadataFetchIT(SchemaTestMode schemaTestMode) {
    super(schemaTestMode);
  }
private static void insertSQL() {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
String[] insertSqls =
new String[] {
"CREATE DATABASE root.ln.wf01.wt01",
"CREATE DATABASE root.ln.wf01.wt02",
"CREATE DATABASE root.ln1.wf01.wt01",
"CREATE DATABASE root.ln2.wf01.wt01",
"CREATE TIMESERIES root.ln.wf01.wt01.status WITH DATATYPE = BOOLEAN, ENCODING = PLAIN",
"CREATE TIMESERIES root.ln.wf01.wt01.temperature WITH DATATYPE = FLOAT, ENCODING = RLE, "
+ "compressor = SNAPPY, 'MAX_POINT_NUMBER' = '3' ",
"CREATE ALIGNED TIMESERIES root.ln.wf01.wt02(s1 INT32, s2 DOUBLE)",
"CREATE TIMESERIES root.ln1.wf01.wt01.status WITH DATATYPE = BOOLEAN, ENCODING = PLAIN",
"CREATE TIMESERIES root.ln1.wf01.wt01.temperature WITH DATATYPE = FLOAT, ENCODING = RLE, "
+ "compressor = SNAPPY, 'MAX_POINT_NUMBER' = '3'",
"CREATE TIMESERIES root.ln2.wf01.wt01.status WITH DATATYPE = BOOLEAN, ENCODING = PLAIN",
"CREATE TIMESERIES root.ln2.wf01.wt01.temperature WITH DATATYPE = FLOAT, ENCODING = RLE, "
+ "compressor = SNAPPY, 'MAX_POINT_NUMBER' = '3'"
};
for (String sql : insertSqls) {
statement.execute(sql);
}
} catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
  /** Starts the cluster once per parameterized run and prepares the schema environment. */
  @Parameterized.BeforeParam
  public static void before() throws Exception {
    setUpEnvironment();
    EnvFactory.getEnv().initClusterEnvironment();
  }

  /** Tears the cluster down after the whole parameterized run completes. */
  @Parameterized.AfterParam
  public static void after() throws Exception {
    EnvFactory.getEnv().cleanClusterEnvironment();
    tearDownEnvironment();
  }

  /** Re-creates the shared test schema before each test method. */
  @Before
  public void setUp() throws Exception {
    insertSQL();
  }

  /** Removes all schema created by the test so cases stay independent. */
  @After
  public void tearDown() throws Exception {
    clearSchema();
  }
  /**
   * Checks SHOW TIMESERIES against full paths, prefix/wildcard patterns, and WHERE filters
   * (name-contains and dataType). Expected rows are comma-joined column values (with a trailing
   * comma) kept in sets because result order is not guaranteed.
   */
  @Test
  public void showTimeseriesTest() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls =
          new String[] {
            "show timeseries root.ln.wf01.wt01.status", // full seriesPath
            "show timeseries root.ln.**", // prefix seriesPath
            "show timeseries root.ln.*.wt01.*", // seriesPath with stars
            "show timeseries root.ln*.**", // the same as root
            "show timeseries root.a.b", // nonexistent timeseries, thus returning ""
            "show timeseries root.ln*.** where timeseries contains 'tat'",
            "show timeseries root.ln.** where timeseries contains 'wf01.wt01'",
            "show timeseries root.ln.** where dataType=BOOLEAN"
          };
      // standards[n] holds the expected rows for sqls[n]; each row is consumed exactly once below.
      Set<String>[] standards =
          new Set[] {
            new HashSet<>(
                Collections.singletonList(
                    "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,")),
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln.wf01.wt01.temperature,null,root.ln.wf01.wt01,FLOAT,RLE,SNAPPY,null,null,null,null,BASE,",
                    "root.ln.wf01.wt02.s1,null,root.ln.wf01.wt02,INT32,TS_2DIFF,LZ4,null,null,null,null,BASE,",
                    "root.ln.wf01.wt02.s2,null,root.ln.wf01.wt02,DOUBLE,GORILLA,LZ4,null,null,null,null,BASE,")),
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln.wf01.wt01.temperature,null,root.ln.wf01.wt01,FLOAT,RLE,SNAPPY,null,null,null,null,BASE,")),
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln.wf01.wt01.temperature,null,root.ln.wf01.wt01,FLOAT,RLE,SNAPPY,null,null,null,null,BASE,",
                    "root.ln.wf01.wt02.s1,null,root.ln.wf01.wt02,INT32,TS_2DIFF,LZ4,null,null,null,null,BASE,",
                    "root.ln.wf01.wt02.s2,null,root.ln.wf01.wt02,DOUBLE,GORILLA,LZ4,null,null,null,null,BASE,",
                    "root.ln1.wf01.wt01.status,null,root.ln1.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln1.wf01.wt01.temperature,null,root.ln1.wf01.wt01,FLOAT,RLE,SNAPPY,null,null,null,null,BASE,",
                    "root.ln2.wf01.wt01.status,null,root.ln2.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln2.wf01.wt01.temperature,null,root.ln2.wf01.wt01,FLOAT,RLE,SNAPPY,null,null,null,null,BASE,")),
            new HashSet<>(),
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln1.wf01.wt01.status,null,root.ln1.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln2.wf01.wt01.status,null,root.ln2.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,")),
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,",
                    "root.ln.wf01.wt01.temperature,null,root.ln.wf01.wt01,FLOAT,RLE,SNAPPY,null,null,null,null,BASE,")),
            new HashSet<>(
                Collections.singletonList(
                    "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,"))
          };

      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        Set<String> standard = standards[n];
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            // Join every column with a trailing comma to match the expected-row format above.
            StringBuilder builder = new StringBuilder();
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            String string = builder.toString();
            Assert.assertTrue(standard.contains(string));
            standard.remove(string);
          }
          // Every expected row must appear exactly once; none may be missing.
          assertEquals(0, standard.size());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
@Test
public void showDatabasesTest() throws SQLException {
try (final Connection connection = EnvFactory.getEnv().getConnection();
final Statement statement = connection.createStatement()) {
final String[] sqls =
new String[] {
"show databases root.ln*.**",
"show databases root.ln.wf01.**",
"show databases root.ln.wf01.wt01.status"
};
final List<String>[] standards =
new List[] {
Arrays.asList(
"root.ln.wf01.wt01",
"root.ln.wf01.wt02",
"root.ln1.wf01.wt01",
"root.ln2.wf01.wt01"),
Arrays.asList("root.ln.wf01.wt01", "root.ln.wf01.wt02"),
Collections.emptyList()
};
for (int n = 0; n < sqls.length; n++) {
final String sql = sqls[n];
final List<String> standard = standards[n];
int i = 0;
try (final ResultSet resultSet = statement.executeQuery(sql)) {
while (resultSet.next()) {
assertEquals(standard.get(i++), resultSet.getString(1));
}
assertEquals(i, standard.size());
} catch (final SQLException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
}
}
  /**
   * Verifies SHOW DEVICES ... WITH DATABASE (extra database column in each row) after setting a
   * TTL of 8888 on root.ln.wf01.wt02, plus a pattern that matches no device.
   */
  @Test
  public void showDevicesWithSgTest() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      statement.execute("set ttl to root.ln.wf01.wt02 8888");
      String[] sqls =
          new String[] {
            "show devices root.ln.** with database", "show devices root.ln.wf01.wt01.temperature",
          };
      // Rows: device,database,isAligned,template,ttl — wt02 carries the TTL set above.
      Set<String>[] standards =
          new Set[] {
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01,root.ln.wf01.wt01,false,null,INF,",
                    "root.ln.wf01.wt02,root.ln.wf01.wt02,true,null,8888,")),
            new HashSet<>(),
          };
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        Set<String> standard = standards[n];
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            StringBuilder builder = new StringBuilder();
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            String string = builder.toString();
            Assert.assertTrue(standard.contains(string));
            standard.remove(string);
          }
          assertEquals(0, standard.size());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
  /**
   * Verifies SHOW DEVICES template filtering: "where template is not null" only lists devices
   * whose template timeseries were activated, and "where template = 't2'" narrows to one template.
   */
  @Test
  public void showDevicesWithTemplate() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      statement.execute("CREATE DATABASE root.sg1");
      statement.execute("CREATE DATABASE root.sg2");
      // t1 is non-aligned, t2 is aligned (reflected in the isAligned column below).
      statement.execute("CREATE DEVICE TEMPLATE t1 (s1 INT64, s2 DOUBLE)");
      statement.execute("CREATE DEVICE TEMPLATE t2 aligned (s1 INT64, s2 DOUBLE)");
      statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1");
      statement.execute("SET DEVICE TEMPLATE t2 TO root.sg1.d2");
      statement.execute("SET DEVICE TEMPLATE t1 TO root.sg2.d1");
      statement.execute("SET DEVICE TEMPLATE t2 TO root.sg2.d2");
      // Activate templates on three of the four devices; root.sg1.d1 is intentionally left out.
      statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d2");
      statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg2.d1");
      statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg2.d2");

      String[] sqls =
          new String[] {
            "show devices root.** where template is not null",
            "show devices root.sg2.** with database where template = 't2'",
          };
      Set<String>[] standards =
          new Set[] {
            new HashSet<>(
                Arrays.asList(
                    "root.sg1.d2,true,t2,INF,",
                    "root.sg2.d1,false,t1,INF,",
                    "root.sg2.d2,true,t2,INF,")),
            new HashSet<>(Arrays.asList("root.sg2.d2,root.sg2,true,t2,INF,")),
          };
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        Set<String> standard = standards[n];
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            StringBuilder builder = new StringBuilder();
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            String string = builder.toString();
            Assert.assertTrue(standard.contains(string));
            standard.remove(string);
          }
          assertEquals(0, standard.size());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
  /**
   * Verifies plain SHOW DEVICES: a prefix pattern, a pattern matching no device, and a
   * "where device contains" filter. A TTL of 8888 is set on root.ln.wf01.wt02 first and must
   * appear in its row.
   */
  @Test
  public void showDevicesTest() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      statement.execute("set ttl to root.ln.wf01.wt02 8888");
      String[] sqls =
          new String[] {
            "show devices root.ln.**",
            "show devices root.ln.wf01.wt01.temperature",
            "show devices root.** where device contains 'wt02'",
          };
      // Rows: device,isAligned,template,ttl.
      Set<String>[] standards =
          new Set[] {
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01,false,null,INF,", "root.ln.wf01.wt02,true,null,8888,")),
            new HashSet<>(),
            new HashSet<>(Arrays.asList("root.ln.wf01.wt02,true,null,8888,")),
          };
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        Set<String> standard = standards[n];
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            StringBuilder builder = new StringBuilder();
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            String string = builder.toString();
            Assert.assertTrue(standard.contains(string));
            standard.remove(string);
          }
          assertEquals(0, standard.size());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
@Test
public void showDevicesWithWildcardTest() throws SQLException {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
statement.execute("set ttl to root.ln.wf01.wt02 8888");
String[] sqls =
new String[] {
"show devices root.l*.wf01.w*",
"show devices root.ln.*f01.*",
"show devices root.l*.*f*.*1",
};
Set<String>[] standards =
new Set[] {
new HashSet<>(
Arrays.asList(
"root.ln.wf01.wt01,false,null,INF,",
"root.ln.wf01.wt02,true,null,8888,",
"root.ln1.wf01.wt01,false,null,INF,",
"root.ln2.wf01.wt01,false,null,INF,")),
new HashSet<>(
Arrays.asList(
"root.ln.wf01.wt01,false,null,INF,", "root.ln.wf01.wt02,true,null,8888,")),
new HashSet<>(
Arrays.asList(
"root.ln.wf01.wt01,false,null,INF,",
"root.ln1.wf01.wt01,false,null,INF,",
"root.ln2.wf01.wt01,false,null,INF,"))
};
for (int n = 0; n < sqls.length; n++) {
String sql = sqls[n];
Set<String> standard = standards[n];
try (ResultSet resultSet = statement.executeQuery(sql)) {
ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
while (resultSet.next()) {
StringBuilder builder = new StringBuilder();
for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
builder.append(resultSet.getString(i)).append(",");
}
String string = builder.toString();
Assert.assertTrue(standard.contains(string));
standard.remove(string);
}
assertEquals(0, standard.size());
} catch (SQLException e) {
e.printStackTrace();
fail(e.getMessage());
}
}
}
}
  // SHOW CHILD PATHS: expects a single child (root.ln.wf01) with node type
  // "SG INTERNAL". Rows are compared as one comma/newline-joined string, so the
  // expected value encodes column order and row order.
  @Test
  public void showChildPaths() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls = new String[] {"show child paths root.ln"};
      String[] standards = new String[] {"root.ln.wf01,SG INTERNAL,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }

  // SHOW CHILD NODES returns bare node names (no full paths): just "wf01" here.
  @Test
  public void showChildNodes() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls = new String[] {"show child nodes root.ln"};
      String[] standards = new String[] {"wf01,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }

  // COUNT TIMESERIES: root.ln.** counts 4; the wildcard prefix root.ln*.** also
  // matches root.ln1/root.ln2 fixtures for a total of 8.
  @Test
  public void showCountTimeSeries() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls = new String[] {"COUNT TIMESERIES root.ln.**", "COUNT TIMESERIES root.ln*.**"};
      String[] standards = new String[] {"4,\n", "8,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
  // COUNT TIMESERIES filtered by TAGS(...): tags are attached via ALTER here, then
  // counted with and without a path prefix; a tag nobody has (tag3) counts 0.
  @Test
  public void showCountTimeSeriesWithTag() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      statement.execute("ALTER timeseries root.ln1.wf01.wt01.status ADD TAGS tag1=v1, tag2=v2");
      statement.execute("ALTER timeseries root.ln2.wf01.wt01.status ADD TAGS tag1=v1");
      String[] sqls =
          new String[] {
            "COUNT TIMESERIES root.ln1.** where TAGS(tag1) = v1",
            "COUNT TIMESERIES where TAGS(tag1) = v1",
            "COUNT TIMESERIES where TAGS(tag3) = v3"
          };
      String[] standards = new String[] {"1,\n", "2,\n", "0,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }

  // COUNT TIMESERIES filtered by "TIMESERIES contains <substring>" on the path.
  @Test
  public void showCountTimeSeriesWithPathContains() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls =
          new String[] {
            "COUNT TIMESERIES root.** where TIMESERIES contains 'wf01.wt01'",
            "COUNT TIMESERIES root.ln.** where TIMESERIES contains 's'",
          };
      String[] standards = new String[] {"6,\n", "3,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
  // COUNT DEVICES over '**' globs and over a leaf timeseries path (which matches
  // no device, hence 0).
  @Test
  public void showCountDevices() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls =
          new String[] {
            "COUNT DEVICES root.ln.**",
            "COUNT DEVICES root.ln*.**",
            "COUNT DEVICES root.ln.wf01.wt01.temperature"
          };
      String[] standards = new String[] {"2,\n", "4,\n", "0,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }

  // COUNT DATABASES mirrors the COUNT DEVICES shape above; the name keeps the
  // legacy "storage group" terminology.
  @Test
  public void showCountStorageGroup() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls =
          new String[] {
            "count databases root.ln.**",
            "count databases root.ln*.**",
            "count databases root.ln.wf01.wt01.status"
          };
      String[] standards = new String[] {"2,\n", "4,\n", "0,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
  // COUNT TIMESERIES ... GROUP BY LEVEL=n: one (prefix, count) row per path node
  // at that level; level deeper than any path (5) yields an empty result.
  @Test
  public void showCountTimeSeriesGroupBy() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls =
          new String[] {
            "COUNT TIMESERIES root.ln*.** group by level=1",
            "COUNT TIMESERIES root.ln*.** group by level=3",
            "COUNT TIMESERIES root.ln*.**.status group by level=2",
            "COUNT TIMESERIES root.ln*.** group by level=5"
          };
      Set<String>[] standards =
          new Set[] {
            new HashSet<>(Arrays.asList("root.ln,4,", "root.ln1,2,", "root.ln2,2,")),
            new HashSet<>(
                Arrays.asList(
                    "root.ln.wf01.wt01,2,",
                    "root.ln.wf01.wt02,2,",
                    "root.ln1.wf01.wt01,2,",
                    "root.ln2.wf01.wt01,2,")),
            new HashSet<>(Arrays.asList("root.ln.wf01,1,", "root.ln1.wf01,1,", "root.ln2.wf01,1,")),
            // Immutable empty set is safe: no rows are expected, so remove() below
            // is never reached for this entry.
            Collections.emptySet()
          };
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        Set<String> standard = standards[n];
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          while (resultSet.next()) {
            String string = resultSet.getString(1) + "," + resultSet.getLong(2) + ",";
            Assert.assertTrue(standard.contains(string));
            standard.remove(string);
          }
          assertEquals(0, standard.size());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }

  // GROUP BY LEVEL combined with a TAGS(...) predicate.
  @Test
  public void showCountTimeSeriesGroupByWithTag() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      statement.execute("ALTER timeseries root.ln1.wf01.wt01.status ADD TAGS tag1=v1, tag2=v2");
      statement.execute("ALTER timeseries root.ln2.wf01.wt01.status ADD TAGS tag1=v1");
      String[] sqls =
          new String[] {
            "COUNT TIMESERIES root.** where TAGS(tag1) = v1 group by level=1",
            "COUNT TIMESERIES root.** where TAGS(tag2) = v2 group by level=3",
            "COUNT TIMESERIES root.**.status where TAGS(tag1) = v1 group by level=2",
            "COUNT TIMESERIES root.** where TAGS(tag3) = v3 group by level=2"
          };
      Set<String>[] standards =
          new Set[] {
            new HashSet<>(Arrays.asList("root.ln1,1,", "root.ln2,1,")),
            new HashSet<>(Collections.singletonList("root.ln1.wf01.wt01,1,")),
            new HashSet<>(Arrays.asList("root.ln1.wf01,1,", "root.ln2.wf01,1,")),
            Collections.emptySet(),
          };
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        Set<String> standard = standards[n];
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          while (resultSet.next()) {
            String string = resultSet.getString(1) + "," + resultSet.getInt(2) + ",";
            Assert.assertTrue(standard.contains(string));
            standard.remove(string);
          }
          assertEquals(0, standard.size());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }

  // GROUP BY LEVEL combined with a "TIMESERIES contains" path predicate.
  @Test
  public void showCountTimeSeriesGroupByWithPathContains() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls =
          new String[] {
            "COUNT TIMESERIES root.** where TIMESERIES contains 'wf01.wt01' group by level=1",
            "COUNT TIMESERIES root.ln.** where TIMESERIES contains 's' group by level=3"
          };
      Set<String>[] standards =
          new Set[] {
            new HashSet<>(Arrays.asList("root.ln,2,", "root.ln1,2,", "root.ln2,2,")),
            new HashSet<>(Arrays.asList("root.ln.wf01.wt01,1,", "root.ln.wf01.wt02,2,"))
          };
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        Set<String> standard = standards[n];
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          while (resultSet.next()) {
            String string = resultSet.getString(1) + "," + resultSet.getInt(2) + ",";
            Assert.assertTrue(standard.contains(string));
            standard.remove(string);
          }
          assertEquals(0, standard.size());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
  // COUNT NODES <path> LEVEL=n: counts distinct path nodes at the given level
  // among paths matching the pattern; a level past the pattern depth counts 0.
  @Test
  public void showCountNodes() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      String[] sqls =
          new String[] {
            "COUNT NODES root.ln*.** level=1",
            "COUNT NODES root.ln level=1",
            "COUNT NODES root.ln.wf01.** level=1",
            "COUNT NODES root.ln.wf01.* level=2",
            "COUNT NODES root.ln.wf01.* level=3",
            "COUNT NODES root.ln.wf01.* level=4"
          };
      String[] standards = new String[] {"3,\n", "1,\n", "1,\n", "1,\n", "2,\n", "0,\n"};
      for (int n = 0; n < sqls.length; n++) {
        String sql = sqls[n];
        String standard = standards[n];
        StringBuilder builder = new StringBuilder();
        try (ResultSet resultSet = statement.executeQuery(sql)) {
          ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
          while (resultSet.next()) {
            for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
              builder.append(resultSet.getString(i)).append(",");
            }
            builder.append("\n");
          }
          Assert.assertEquals(standard, builder.toString());
        } catch (SQLException e) {
          e.printStackTrace();
          fail(e.getMessage());
        }
      }
    }
  }
  // Aligned timeseries with per-measurement alias, tags, and attributes: SHOW
  // TIMESERIES must render the alias column, the tag/attribute JSON, and the
  // default encoding per datatype. Row order is assumed fixed (s1 then s2);
  // the final count assertion guards against missing rows.
  @Test
  public void showAlignedTimeseriesWithAliasAndTags() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      statement.execute(
          "create aligned timeseries root.sg.d(s1(alias1) int32 tags('tag1'='v1', 'tag2'='v2'), s2 double attributes('attr3'='v3'))");
      String[] expected =
          new String[] {
            "root.sg.d.s1,alias1,root.sg,INT32,TS_2DIFF,LZ4,{\"tag1\":\"v1\",\"tag2\":\"v2\"},null,null,null,BASE,",
            "root.sg.d.s2,null,root.sg,DOUBLE,GORILLA,LZ4,null,{\"attr3\":\"v3\"},null,null,BASE,"
          };
      int num = 0;
      try (ResultSet resultSet = statement.executeQuery("show timeseries root.sg.d.*")) {
        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
        while (resultSet.next()) {
          StringBuilder builder = new StringBuilder();
          for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            builder.append(resultSet.getString(i)).append(",");
          }
          Assert.assertEquals(expected[num++], builder.toString());
        }
      }
      Assert.assertEquals(2, num);
    }
  }

  // SHOW LATEST TIMESERIES after writes to both series; auto-created series get
  // type-inferred datatypes (FLOAT/BOOLEAN) with their default encodings.
  @Test
  public void showLatestTimeseriesTest() throws SQLException {
    try (Connection connection = EnvFactory.getEnv().getConnection();
        Statement statement = connection.createStatement()) {
      statement.execute("insert into root.ln.wf01.wt01(time, status) values(1, 1)");
      statement.execute("insert into root.ln.wf01.wt01(time, temperature) values(2, 1)");
      String sql = "show latest timeseries root.ln.wf01.wt01.*";
      Set<String> standard =
          new HashSet<>(
              Arrays.asList(
                  "root.ln.wf01.wt01.temperature,null,root.ln.wf01.wt01,FLOAT,RLE,SNAPPY,null,null,null,null,BASE,",
                  "root.ln.wf01.wt01.status,null,root.ln.wf01.wt01,BOOLEAN,PLAIN,LZ4,null,null,null,null,BASE,"));
      try (ResultSet resultSet = statement.executeQuery(sql)) {
        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
        while (resultSet.next()) {
          StringBuilder builder = new StringBuilder();
          for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
            builder.append(resultSet.getString(i)).append(",");
          }
          String string = builder.toString();
          Assert.assertTrue(standard.contains(string));
          standard.remove(string);
        }
        assertEquals(0, standard.size());
      } catch (SQLException e) {
        e.printStackTrace();
        fail(e.getMessage());
      }
    }
  }
@Test
public void showDeadbandInfo() throws SQLException {
try (Connection connection = EnvFactory.getEnv().getConnection();
Statement statement = connection.createStatement()) {
String[] sqls =
new String[] {
"CREATE TIMESERIES root.sg1.d0.s0 WITH DATATYPE=INT32",
"CREATE TIMESERIES root.sg1.d0.s1 WITH DATATYPE=INT32,ENCODING=PLAIN,LOSS=SDT,COMPDEV=2",
"CREATE TIMESERIES root.sg1.d0.s2 WITH DATATYPE=INT32,ENCODING=PLAIN,LOSS=SDT, COMPDEV=0.01, COMPMINTIME=2, COMPMAXTIME=15"
};
for (String sql : sqls) {
statement.execute(sql);
}
Set<String> standard =
new HashSet<>(
Arrays.asList(
"root.sg1.d0.s0,null,root.sg1,INT32,TS_2DIFF,LZ4,null,null,null,null,BASE,\n",
"root.sg1.d0.s1,null,root.sg1,INT32,PLAIN,LZ4,null,null,SDT,{compdev=2},BASE,\n",
"root.sg1.d0.s2,null,root.sg1,INT32,PLAIN,LZ4,null,null,SDT,{compdev=0.01, compmintime=2, compmaxtime=15},BASE,\n"));
try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg1.d0.*")) {
ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
while (resultSet.next()) {
StringBuilder builder = new StringBuilder();
for (int i = 1; i <= resultSetMetaData.getColumnCount(); i++) {
builder.append(resultSet.getString(i)).append(",");
}
builder.append("\n");
Assert.assertTrue(standard.contains(builder.toString()));
}
} catch (SQLException e) {
fail(e.getMessage());
} finally {
statement.execute("delete timeseries root.sg1.d0.*");
}
}
}
}
|
googleapis/google-cloud-java | 36,225 | java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/CheckAutopilotCompatibilityResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1;
/**
*
*
* <pre>
* CheckAutopilotCompatibilityResponse has a list of compatibility issues.
* </pre>
*
* Protobuf type {@code google.container.v1.CheckAutopilotCompatibilityResponse}
*/
public final class CheckAutopilotCompatibilityResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1.CheckAutopilotCompatibilityResponse)
CheckAutopilotCompatibilityResponseOrBuilder {
  // NOTE(review): this whole region is protoc-generated plumbing for the message
  // (constructors, reflection tables, field accessors, wire serialization,
  // equals/hashCode, and the parseFrom overload family). Behavior changes belong
  // in google/container/v1/cluster_service.proto, not here.
  private static final long serialVersionUID = 0L;

  // Use CheckAutopilotCompatibilityResponse.newBuilder() to construct.
  private CheckAutopilotCompatibilityResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private CheckAutopilotCompatibilityResponse() {
    issues_ = java.util.Collections.emptyList();
    summary_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CheckAutopilotCompatibilityResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_CheckAutopilotCompatibilityResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1.ClusterServiceProto
        .internal_static_google_container_v1_CheckAutopilotCompatibilityResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1.CheckAutopilotCompatibilityResponse.class,
            com.google.container.v1.CheckAutopilotCompatibilityResponse.Builder.class);
  }

  public static final int ISSUES_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.container.v1.AutopilotCompatibilityIssue> issues_;

  /**
   *
   *
   * <pre>
   * The list of issues for the given operation.
   * </pre>
   *
   * <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.container.v1.AutopilotCompatibilityIssue> getIssuesList() {
    return issues_;
  }

  /**
   *
   *
   * <pre>
   * The list of issues for the given operation.
   * </pre>
   *
   * <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.container.v1.AutopilotCompatibilityIssueOrBuilder>
      getIssuesOrBuilderList() {
    return issues_;
  }

  /**
   *
   *
   * <pre>
   * The list of issues for the given operation.
   * </pre>
   *
   * <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
   */
  @java.lang.Override
  public int getIssuesCount() {
    return issues_.size();
  }

  /**
   *
   *
   * <pre>
   * The list of issues for the given operation.
   * </pre>
   *
   * <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
   */
  @java.lang.Override
  public com.google.container.v1.AutopilotCompatibilityIssue getIssues(int index) {
    return issues_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The list of issues for the given operation.
   * </pre>
   *
   * <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
   */
  @java.lang.Override
  public com.google.container.v1.AutopilotCompatibilityIssueOrBuilder getIssuesOrBuilder(
      int index) {
    return issues_.get(index);
  }

  public static final int SUMMARY_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object summary_ = "";

  /**
   *
   *
   * <pre>
   * The summary of the autopilot compatibility response.
   * </pre>
   *
   * <code>string summary = 2;</code>
   *
   * @return The summary.
   */
  @java.lang.Override
  public java.lang.String getSummary() {
    java.lang.Object ref = summary_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString once and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      summary_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The summary of the autopilot compatibility response.
   * </pre>
   *
   * <code>string summary = 2;</code>
   *
   * @return The bytes for summary.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSummaryBytes() {
    java.lang.Object ref = summary_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      summary_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < issues_.size(); i++) {
      output.writeMessage(1, issues_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(summary_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, summary_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < issues_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, issues_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(summary_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, summary_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1.CheckAutopilotCompatibilityResponse)) {
      return super.equals(obj);
    }
    com.google.container.v1.CheckAutopilotCompatibilityResponse other =
        (com.google.container.v1.CheckAutopilotCompatibilityResponse) obj;

    if (!getIssuesList().equals(other.getIssuesList())) return false;
    if (!getSummary().equals(other.getSummary())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getIssuesCount() > 0) {
      hash = (37 * hash) + ISSUES_FIELD_NUMBER;
      hash = (53 * hash) + getIssuesList().hashCode();
    }
    hash = (37 * hash) + SUMMARY_FIELD_NUMBER;
    hash = (53 * hash) + getSummary().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.container.v1.CheckAutopilotCompatibilityResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.container.v1.CheckAutopilotCompatibilityResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* CheckAutopilotCompatibilityResponse has a list of compatibility issues.
* </pre>
*
* Protobuf type {@code google.container.v1.CheckAutopilotCompatibilityResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.container.v1.CheckAutopilotCompatibilityResponse)
com.google.container.v1.CheckAutopilotCompatibilityResponseOrBuilder {
    // NOTE(review): generated Builder plumbing. bitField0_ bit 0x1 tracks the
    // repeated `issues` field, bit 0x2 the `summary` field. Do not hand-edit;
    // regenerate from the .proto.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_CheckAutopilotCompatibilityResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_CheckAutopilotCompatibilityResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1.CheckAutopilotCompatibilityResponse.class,
              com.google.container.v1.CheckAutopilotCompatibilityResponse.Builder.class);
    }

    // Construct using com.google.container.v1.CheckAutopilotCompatibilityResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (issuesBuilder_ == null) {
        issues_ = java.util.Collections.emptyList();
      } else {
        issues_ = null;
        issuesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      summary_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1.ClusterServiceProto
          .internal_static_google_container_v1_CheckAutopilotCompatibilityResponse_descriptor;
    }

    @java.lang.Override
    public com.google.container.v1.CheckAutopilotCompatibilityResponse getDefaultInstanceForType() {
      return com.google.container.v1.CheckAutopilotCompatibilityResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.container.v1.CheckAutopilotCompatibilityResponse build() {
      com.google.container.v1.CheckAutopilotCompatibilityResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.container.v1.CheckAutopilotCompatibilityResponse buildPartial() {
      com.google.container.v1.CheckAutopilotCompatibilityResponse result =
          new com.google.container.v1.CheckAutopilotCompatibilityResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.container.v1.CheckAutopilotCompatibilityResponse result) {
      if (issuesBuilder_ == null) {
        // Freeze the builder-owned list before handing it to the immutable message.
        if (((bitField0_ & 0x00000001) != 0)) {
          issues_ = java.util.Collections.unmodifiableList(issues_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.issues_ = issues_;
      } else {
        result.issues_ = issuesBuilder_.build();
      }
    }

    private void buildPartial0(com.google.container.v1.CheckAutopilotCompatibilityResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.summary_ = summary_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1.CheckAutopilotCompatibilityResponse) {
        return mergeFrom((com.google.container.v1.CheckAutopilotCompatibilityResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
public Builder mergeFrom(com.google.container.v1.CheckAutopilotCompatibilityResponse other) {
if (other == com.google.container.v1.CheckAutopilotCompatibilityResponse.getDefaultInstance())
return this;
if (issuesBuilder_ == null) {
if (!other.issues_.isEmpty()) {
if (issues_.isEmpty()) {
issues_ = other.issues_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureIssuesIsMutable();
issues_.addAll(other.issues_);
}
onChanged();
}
} else {
if (!other.issues_.isEmpty()) {
if (issuesBuilder_.isEmpty()) {
issuesBuilder_.dispose();
issuesBuilder_ = null;
issues_ = other.issues_;
bitField0_ = (bitField0_ & ~0x00000001);
issuesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getIssuesFieldBuilder()
: null;
} else {
issuesBuilder_.addAllMessages(other.issues_);
}
}
}
if (!other.getSummary().isEmpty()) {
summary_ = other.summary_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.container.v1.AutopilotCompatibilityIssue m =
input.readMessage(
com.google.container.v1.AutopilotCompatibilityIssue.parser(),
extensionRegistry);
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.add(m);
} else {
issuesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
summary_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.container.v1.AutopilotCompatibilityIssue> issues_ =
java.util.Collections.emptyList();
private void ensureIssuesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
issues_ =
new java.util.ArrayList<com.google.container.v1.AutopilotCompatibilityIssue>(issues_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.container.v1.AutopilotCompatibilityIssue,
com.google.container.v1.AutopilotCompatibilityIssue.Builder,
com.google.container.v1.AutopilotCompatibilityIssueOrBuilder>
issuesBuilder_;
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public java.util.List<com.google.container.v1.AutopilotCompatibilityIssue> getIssuesList() {
if (issuesBuilder_ == null) {
return java.util.Collections.unmodifiableList(issues_);
} else {
return issuesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public int getIssuesCount() {
if (issuesBuilder_ == null) {
return issues_.size();
} else {
return issuesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public com.google.container.v1.AutopilotCompatibilityIssue getIssues(int index) {
if (issuesBuilder_ == null) {
return issues_.get(index);
} else {
return issuesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder setIssues(int index, com.google.container.v1.AutopilotCompatibilityIssue value) {
if (issuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIssuesIsMutable();
issues_.set(index, value);
onChanged();
} else {
issuesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder setIssues(
int index, com.google.container.v1.AutopilotCompatibilityIssue.Builder builderForValue) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.set(index, builderForValue.build());
onChanged();
} else {
issuesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder addIssues(com.google.container.v1.AutopilotCompatibilityIssue value) {
if (issuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIssuesIsMutable();
issues_.add(value);
onChanged();
} else {
issuesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder addIssues(int index, com.google.container.v1.AutopilotCompatibilityIssue value) {
if (issuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureIssuesIsMutable();
issues_.add(index, value);
onChanged();
} else {
issuesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder addIssues(
com.google.container.v1.AutopilotCompatibilityIssue.Builder builderForValue) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.add(builderForValue.build());
onChanged();
} else {
issuesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder addIssues(
int index, com.google.container.v1.AutopilotCompatibilityIssue.Builder builderForValue) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.add(index, builderForValue.build());
onChanged();
} else {
issuesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder addAllIssues(
java.lang.Iterable<? extends com.google.container.v1.AutopilotCompatibilityIssue> values) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, issues_);
onChanged();
} else {
issuesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder clearIssues() {
if (issuesBuilder_ == null) {
issues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
issuesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public Builder removeIssues(int index) {
if (issuesBuilder_ == null) {
ensureIssuesIsMutable();
issues_.remove(index);
onChanged();
} else {
issuesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public com.google.container.v1.AutopilotCompatibilityIssue.Builder getIssuesBuilder(int index) {
return getIssuesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public com.google.container.v1.AutopilotCompatibilityIssueOrBuilder getIssuesOrBuilder(
int index) {
if (issuesBuilder_ == null) {
return issues_.get(index);
} else {
return issuesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public java.util.List<? extends com.google.container.v1.AutopilotCompatibilityIssueOrBuilder>
getIssuesOrBuilderList() {
if (issuesBuilder_ != null) {
return issuesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(issues_);
}
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public com.google.container.v1.AutopilotCompatibilityIssue.Builder addIssuesBuilder() {
return getIssuesFieldBuilder()
.addBuilder(com.google.container.v1.AutopilotCompatibilityIssue.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public com.google.container.v1.AutopilotCompatibilityIssue.Builder addIssuesBuilder(int index) {
return getIssuesFieldBuilder()
.addBuilder(
index, com.google.container.v1.AutopilotCompatibilityIssue.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of issues for the given operation.
* </pre>
*
* <code>repeated .google.container.v1.AutopilotCompatibilityIssue issues = 1;</code>
*/
public java.util.List<com.google.container.v1.AutopilotCompatibilityIssue.Builder>
getIssuesBuilderList() {
return getIssuesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.container.v1.AutopilotCompatibilityIssue,
com.google.container.v1.AutopilotCompatibilityIssue.Builder,
com.google.container.v1.AutopilotCompatibilityIssueOrBuilder>
getIssuesFieldBuilder() {
if (issuesBuilder_ == null) {
issuesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.container.v1.AutopilotCompatibilityIssue,
com.google.container.v1.AutopilotCompatibilityIssue.Builder,
com.google.container.v1.AutopilotCompatibilityIssueOrBuilder>(
issues_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
issues_ = null;
}
return issuesBuilder_;
}
private java.lang.Object summary_ = "";
/**
*
*
* <pre>
* The summary of the autopilot compatibility response.
* </pre>
*
* <code>string summary = 2;</code>
*
* @return The summary.
*/
public java.lang.String getSummary() {
java.lang.Object ref = summary_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
summary_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The summary of the autopilot compatibility response.
* </pre>
*
* <code>string summary = 2;</code>
*
* @return The bytes for summary.
*/
public com.google.protobuf.ByteString getSummaryBytes() {
java.lang.Object ref = summary_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
summary_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The summary of the autopilot compatibility response.
* </pre>
*
* <code>string summary = 2;</code>
*
* @param value The summary to set.
* @return This builder for chaining.
*/
public Builder setSummary(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
summary_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The summary of the autopilot compatibility response.
* </pre>
*
* <code>string summary = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearSummary() {
summary_ = getDefaultInstance().getSummary();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The summary of the autopilot compatibility response.
* </pre>
*
* <code>string summary = 2;</code>
*
* @param value The bytes for summary to set.
* @return This builder for chaining.
*/
public Builder setSummaryBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
summary_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.container.v1.CheckAutopilotCompatibilityResponse)
}
// @@protoc_insertion_point(class_scope:google.container.v1.CheckAutopilotCompatibilityResponse)
private static final com.google.container.v1.CheckAutopilotCompatibilityResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.container.v1.CheckAutopilotCompatibilityResponse();
}
public static com.google.container.v1.CheckAutopilotCompatibilityResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CheckAutopilotCompatibilityResponse> PARSER =
new com.google.protobuf.AbstractParser<CheckAutopilotCompatibilityResponse>() {
@java.lang.Override
public CheckAutopilotCompatibilityResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CheckAutopilotCompatibilityResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CheckAutopilotCompatibilityResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.container.v1.CheckAutopilotCompatibilityResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,308 | java-cloudsupport/proto-google-cloud-cloudsupport-v2/src/main/java/com/google/cloud/support/v2/UpdateCaseRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/support/v2/case_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.support.v2;
/**
*
*
* <pre>
* The request message for the UpdateCase endpoint
* </pre>
*
* Protobuf type {@code google.cloud.support.v2.UpdateCaseRequest}
*/
public final class UpdateCaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.support.v2.UpdateCaseRequest)
UpdateCaseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateCaseRequest.newBuilder() to construct.
private UpdateCaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateCaseRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateCaseRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_UpdateCaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_UpdateCaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.support.v2.UpdateCaseRequest.class,
com.google.cloud.support.v2.UpdateCaseRequest.Builder.class);
}
private int bitField0_;
public static final int CASE_FIELD_NUMBER = 1;
private com.google.cloud.support.v2.Case case_;
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the case field is set.
*/
@java.lang.Override
public boolean hasCase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The case.
*/
@java.lang.Override
public com.google.cloud.support.v2.Case getCase() {
return case_ == null ? com.google.cloud.support.v2.Case.getDefaultInstance() : case_;
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.cloud.support.v2.CaseOrBuilder getCaseOrBuilder() {
return case_ == null ? com.google.cloud.support.v2.Case.getDefaultInstance() : case_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getCase());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCase());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.support.v2.UpdateCaseRequest)) {
return super.equals(obj);
}
com.google.cloud.support.v2.UpdateCaseRequest other =
(com.google.cloud.support.v2.UpdateCaseRequest) obj;
if (hasCase() != other.hasCase()) return false;
if (hasCase()) {
if (!getCase().equals(other.getCase())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCase()) {
hash = (37 * hash) + CASE_FIELD_NUMBER;
hash = (53 * hash) + getCase().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.support.v2.UpdateCaseRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.support.v2.UpdateCaseRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for the UpdateCase endpoint
* </pre>
*
* Protobuf type {@code google.cloud.support.v2.UpdateCaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.support.v2.UpdateCaseRequest)
com.google.cloud.support.v2.UpdateCaseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_UpdateCaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_UpdateCaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.support.v2.UpdateCaseRequest.class,
com.google.cloud.support.v2.UpdateCaseRequest.Builder.class);
}
// Construct using com.google.cloud.support.v2.UpdateCaseRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCaseFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
case_ = null;
if (caseBuilder_ != null) {
caseBuilder_.dispose();
caseBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.support.v2.CaseServiceProto
.internal_static_google_cloud_support_v2_UpdateCaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.support.v2.UpdateCaseRequest getDefaultInstanceForType() {
return com.google.cloud.support.v2.UpdateCaseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.support.v2.UpdateCaseRequest build() {
com.google.cloud.support.v2.UpdateCaseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.support.v2.UpdateCaseRequest buildPartial() {
com.google.cloud.support.v2.UpdateCaseRequest result =
new com.google.cloud.support.v2.UpdateCaseRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.support.v2.UpdateCaseRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.case_ = caseBuilder_ == null ? case_ : caseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.support.v2.UpdateCaseRequest) {
return mergeFrom((com.google.cloud.support.v2.UpdateCaseRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.cloud.support.v2.UpdateCaseRequest other) {
      // Field-by-field protobuf merge; a default-instance argument is a no-op.
      if (other == com.google.cloud.support.v2.UpdateCaseRequest.getDefaultInstance()) return this;
      if (other.hasCase()) {
        mergeCase(other.getCase());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      // Preserve any fields the other message carried that this schema
      // version does not know about.
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      // Streaming wire-format parser: reads tag/value pairs until end of
      // input (tag 0) or an end-group tag, merging each known field into
      // this builder and preserving everything else as unknown fields.
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of the stream.
              done = true;
              break;
            case 10:
              {
                // Field 1 (case), wire type 2 (length-delimited message).
                input.readMessage(getCaseFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Field 2 (update_mask), wire type 2 (length-delimited message).
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                // Unknown field: keep it in the unknown-field set unless it
                // is an end-group tag terminating an enclosing group.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always notify parent builders of the (possibly partial) mutation.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private com.google.cloud.support.v2.Case case_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.support.v2.Case,
com.google.cloud.support.v2.Case.Builder,
com.google.cloud.support.v2.CaseOrBuilder>
caseBuilder_;
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the case field is set.
*/
public boolean hasCase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The case.
*/
public com.google.cloud.support.v2.Case getCase() {
if (caseBuilder_ == null) {
return case_ == null ? com.google.cloud.support.v2.Case.getDefaultInstance() : case_;
} else {
return caseBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setCase(com.google.cloud.support.v2.Case value) {
if (caseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
case_ = value;
} else {
caseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setCase(com.google.cloud.support.v2.Case.Builder builderForValue) {
if (caseBuilder_ == null) {
case_ = builderForValue.build();
} else {
caseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeCase(com.google.cloud.support.v2.Case value) {
      // Merges value into the case field per protobuf merge semantics.
      if (caseBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && case_ != null
            && case_ != com.google.cloud.support.v2.Case.getDefaultInstance()) {
          // A non-default case is already present: merge field-by-field via
          // the nested builder.
          getCaseBuilder().mergeFrom(value);
        } else {
          // Nothing meaningful set yet: adopt the incoming message wholesale.
          case_ = value;
        }
      } else {
        caseBuilder_.mergeFrom(value);
      }
      if (case_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearCase() {
bitField0_ = (bitField0_ & ~0x00000001);
case_ = null;
if (caseBuilder_ != null) {
caseBuilder_.dispose();
caseBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.support.v2.Case.Builder getCaseBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCaseFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.support.v2.CaseOrBuilder getCaseOrBuilder() {
if (caseBuilder_ != null) {
return caseBuilder_.getMessageOrBuilder();
} else {
return case_ == null ? com.google.cloud.support.v2.Case.getDefaultInstance() : case_;
}
}
/**
*
*
* <pre>
* Required. The case to update.
* </pre>
*
* <code>.google.cloud.support.v2.Case case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.support.v2.Case,
            com.google.cloud.support.v2.Case.Builder,
            com.google.cloud.support.v2.CaseOrBuilder>
        getCaseFieldBuilder() {
      // Lazily creates the nested single-field builder for `case`; once it
      // exists, the builder (not case_) is the single source of truth, so the
      // plain field is nulled out.
      if (caseBuilder_ == null) {
        caseBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.support.v2.Case,
                com.google.cloud.support.v2.Case.Builder,
                com.google.cloud.support.v2.CaseOrBuilder>(
                getCase(), getParentForChildren(), isClean());
        case_ = null;
      }
      return caseBuilder_;
    }
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      // Merges value into update_mask per protobuf merge semantics.
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          // A non-default mask is already present: merge via the nested builder.
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          // Nothing meaningful set yet: adopt the incoming mask wholesale.
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* A list of attributes of the case that should be updated. Supported values
* are `priority`, `display_name`, and `subscriber_email_addresses`. If no
* fields are specified, all supported fields are updated.
*
* Be careful - if you do not provide a field mask, then you might
* accidentally clear some fields. For example, if you leave the field mask
* empty and do not provide a value for `subscriber_email_addresses`, then
* `subscriber_email_addresses` is updated to empty.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the nested single-field builder for update_mask; after
      // creation the builder owns the field state, so updateMask_ is cleared.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.support.v2.UpdateCaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.support.v2.UpdateCaseRequest)
private static final com.google.cloud.support.v2.UpdateCaseRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.support.v2.UpdateCaseRequest();
}
public static com.google.cloud.support.v2.UpdateCaseRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  private static final com.google.protobuf.Parser<UpdateCaseRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateCaseRequest>() {
        @java.lang.Override
        public UpdateCaseRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          // Delegates to the Builder's stream parser; every failure mode is
          // rethrown as InvalidProtocolBufferException carrying whatever
          // partial message was assembled before the error.
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
public static com.google.protobuf.Parser<UpdateCaseRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateCaseRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.support.v2.UpdateCaseRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,102 | java-recaptchaenterprise/proto-google-cloud-recaptchaenterprise-v1/src/main/java/com/google/recaptchaenterprise/v1/UserId.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto
// Protobuf Java Version: 3.25.8
package com.google.recaptchaenterprise.v1;
/**
*
*
* <pre>
* An identifier associated with a user.
* </pre>
*
* Protobuf type {@code google.cloud.recaptchaenterprise.v1.UserId}
*/
public final class UserId extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1.UserId)
UserIdOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserId.newBuilder() to construct.
private UserId(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UserId() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UserId();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
.internal_static_google_cloud_recaptchaenterprise_v1_UserId_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
.internal_static_google_cloud_recaptchaenterprise_v1_UserId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.recaptchaenterprise.v1.UserId.class,
com.google.recaptchaenterprise.v1.UserId.Builder.class);
}
private int idOneofCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object idOneof_;
  public enum IdOneofCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    // Discriminator for the id_oneof; constant values are the proto field
    // numbers, with 0 reserved for "no member set".
    EMAIL(1),
    PHONE_NUMBER(2),
    USERNAME(3),
    IDONEOF_NOT_SET(0);

    private final int value;

    private IdOneofCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static IdOneofCase valueOf(int value) {
      return forNumber(value);
    }

    // Maps a proto field number to its oneof-case constant; returns null for
    // numbers that are not members of this oneof.
    public static IdOneofCase forNumber(int value) {
      switch (value) {
        case 1:
          return EMAIL;
        case 2:
          return PHONE_NUMBER;
        case 3:
          return USERNAME;
        case 0:
          return IDONEOF_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };
public IdOneofCase getIdOneofCase() {
return IdOneofCase.forNumber(idOneofCase_);
}
public static final int EMAIL_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the email field is set.
*/
public boolean hasEmail() {
return idOneofCase_ == 1;
}
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The email.
*/
  public java.lang.String getEmail() {
    // Returns "" unless the email member (case 1) of id_oneof is the one set.
    java.lang.Object ref = "";
    if (idOneofCase_ == 1) {
      ref = idOneof_;
    }
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field is still held as parsed bytes: decode as UTF-8 once and cache
      // the String back into the oneof slot (only if it still holds email).
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      if (idOneofCase_ == 1) {
        idOneof_ = s;
      }
      return s;
    }
  }
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for email.
*/
public com.google.protobuf.ByteString getEmailBytes() {
java.lang.Object ref = "";
if (idOneofCase_ == 1) {
ref = idOneof_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (idOneofCase_ == 1) {
idOneof_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PHONE_NUMBER_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the phoneNumber field is set.
*/
public boolean hasPhoneNumber() {
return idOneofCase_ == 2;
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The phoneNumber.
*/
public java.lang.String getPhoneNumber() {
java.lang.Object ref = "";
if (idOneofCase_ == 2) {
ref = idOneof_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (idOneofCase_ == 2) {
idOneof_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for phoneNumber.
*/
public com.google.protobuf.ByteString getPhoneNumberBytes() {
java.lang.Object ref = "";
if (idOneofCase_ == 2) {
ref = idOneof_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (idOneofCase_ == 2) {
idOneof_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int USERNAME_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the username field is set.
*/
public boolean hasUsername() {
return idOneofCase_ == 3;
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The username.
*/
public java.lang.String getUsername() {
java.lang.Object ref = "";
if (idOneofCase_ == 3) {
ref = idOneof_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (idOneofCase_ == 3) {
idOneof_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for username.
*/
public com.google.protobuf.ByteString getUsernameBytes() {
java.lang.Object ref = "";
if (idOneofCase_ == 3) {
ref = idOneof_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (idOneofCase_ == 3) {
idOneof_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes whichever id_oneof member is set (string fields 1-3),
    // then any unknown fields preserved from parsing.
    if (idOneofCase_ == 1) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, idOneof_);
    }
    if (idOneofCase_ == 2) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, idOneof_);
    }
    if (idOneofCase_ == 3) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, idOneof_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 in memoizedSize means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Only the set id_oneof member (if any) contributes to the size.
    if (idOneofCase_ == 1) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, idOneof_);
    }
    if (idOneofCase_ == 2) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, idOneof_);
    }
    if (idOneofCase_ == 3) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, idOneof_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.recaptchaenterprise.v1.UserId)) {
      return super.equals(obj);
    }
    com.google.recaptchaenterprise.v1.UserId other = (com.google.recaptchaenterprise.v1.UserId) obj;
    // Two UserIds are equal when the same id_oneof member is set with an
    // equal value and their unknown fields match.
    if (!getIdOneofCase().equals(other.getIdOneofCase())) return false;
    switch (idOneofCase_) {
      case 1:
        if (!getEmail().equals(other.getEmail())) return false;
        break;
      case 2:
        if (!getPhoneNumber().equals(other.getPhoneNumber())) return false;
        break;
      case 3:
        if (!getUsername().equals(other.getUsername())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 serves as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Fold in only the oneof member that is actually set, tagged by its
    // field number so different members with equal values hash differently.
    switch (idOneofCase_) {
      case 1:
        hash = (37 * hash) + EMAIL_FIELD_NUMBER;
        hash = (53 * hash) + getEmail().hashCode();
        break;
      case 2:
        hash = (37 * hash) + PHONE_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + getPhoneNumber().hashCode();
        break;
      case 3:
        hash = (37 * hash) + USERNAME_FIELD_NUMBER;
        hash = (53 * hash) + getUsername().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.recaptchaenterprise.v1.UserId parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.UserId parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.recaptchaenterprise.v1.UserId parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.recaptchaenterprise.v1.UserId parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.recaptchaenterprise.v1.UserId prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* An identifier associated with a user.
* </pre>
*
* Protobuf type {@code google.cloud.recaptchaenterprise.v1.UserId}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1.UserId)
com.google.recaptchaenterprise.v1.UserIdOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
.internal_static_google_cloud_recaptchaenterprise_v1_UserId_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
.internal_static_google_cloud_recaptchaenterprise_v1_UserId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.recaptchaenterprise.v1.UserId.class,
com.google.recaptchaenterprise.v1.UserId.Builder.class);
}
// Construct using com.google.recaptchaenterprise.v1.UserId.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
idOneofCase_ = 0;
idOneof_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
.internal_static_google_cloud_recaptchaenterprise_v1_UserId_descriptor;
}
@java.lang.Override
public com.google.recaptchaenterprise.v1.UserId getDefaultInstanceForType() {
return com.google.recaptchaenterprise.v1.UserId.getDefaultInstance();
}
@java.lang.Override
public com.google.recaptchaenterprise.v1.UserId build() {
com.google.recaptchaenterprise.v1.UserId result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.recaptchaenterprise.v1.UserId buildPartial() {
com.google.recaptchaenterprise.v1.UserId result =
new com.google.recaptchaenterprise.v1.UserId(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.recaptchaenterprise.v1.UserId result) {
int from_bitField0_ = bitField0_;
}
    private void buildPartialOneofs(com.google.recaptchaenterprise.v1.UserId result) {
      // Copies the oneof discriminator and its raw value (String or
      // ByteString) into the message being built.
      result.idOneofCase_ = idOneofCase_;
      result.idOneof_ = this.idOneof_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
/**
 * Merges {@code other} into this builder. A {@code UserId} instance is routed
 * to the type-aware overload; any other message type falls back to the
 * reflective merge inherited from the base builder.
 */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (!(other instanceof com.google.recaptchaenterprise.v1.UserId)) {
    super.mergeFrom(other);
    return this;
  }
  return mergeFrom((com.google.recaptchaenterprise.v1.UserId) other);
}
// Type-aware merge: adopts other's id_oneof selection (if any); no-ops when
// other is the shared default instance.
public Builder mergeFrom(com.google.recaptchaenterprise.v1.UserId other) {
  if (other == com.google.recaptchaenterprise.v1.UserId.getDefaultInstance()) return this;
  switch (other.getIdOneofCase()) {
    case EMAIL:
      {
        idOneofCase_ = 1;
        idOneof_ = other.idOneof_;
        onChanged();
        break;
      }
    case PHONE_NUMBER:
      {
        idOneofCase_ = 2;
        idOneof_ = other.idOneof_;
        onChanged();
        break;
      }
    case USERNAME:
      {
        idOneofCase_ = 3;
        idOneof_ = other.idOneof_;
        onChanged();
        break;
      }
    case IDONEOF_NOT_SET:
      {
        break;
      }
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

// All fields are optional, so a builder is always structurally initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}

// Streaming merge: reads tag/value pairs until end of input (tag 0) or an
// end-group tag. Wire tags 10/18/26 are the length-delimited string fields
// email (1), phone_number (2) and username (3); anything else is preserved
// as an unknown field.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            java.lang.String s = input.readStringRequireUtf8();
            idOneofCase_ = 1;
            idOneof_ = s;
            break;
          } // case 10
        case 18:
          {
            java.lang.String s = input.readStringRequireUtf8();
            idOneofCase_ = 2;
            idOneof_ = s;
            break;
          } // case 18
        case 26:
          {
            java.lang.String s = input.readStringRequireUtf8();
            idOneofCase_ = 3;
            idOneof_ = s;
            break;
          } // case 26
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// id_oneof discriminator: 0 = not set, 1 = email, 2 = phone_number,
// 3 = username (matches the field numbers in the .proto).
private int idOneofCase_ = 0;
// Payload of the active id_oneof member; holds a String or a
// lazily-converted ByteString.
private java.lang.Object idOneof_;

public IdOneofCase getIdOneofCase() {
  return IdOneofCase.forNumber(idOneofCase_);
}

// Clears whichever id_oneof member is currently set.
public Builder clearIdOneof() {
  idOneofCase_ = 0;
  idOneof_ = null;
  onChanged();
  return this;
}

// Generator bookkeeping for singular-field presence; unused here beyond the
// bitField0_ != 0 check in buildPartial, since all fields are in the oneof.
private int bitField0_;
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the email field is set.
*/
@java.lang.Override
public boolean hasEmail() {
  // email is field 1 of id_oneof.
  return idOneofCase_ == 1;
}
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The email.
*/
@java.lang.Override
public java.lang.String getEmail() {
  java.lang.Object ref = "";
  if (idOneofCase_ == 1) {
    ref = idOneof_;
  }
  if (!(ref instanceof java.lang.String)) {
    // Payload is a ByteString; decode once and cache the String back into
    // the oneof slot (only if email is still the active member).
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (idOneofCase_ == 1) {
      idOneof_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for email.
*/
@java.lang.Override
public com.google.protobuf.ByteString getEmailBytes() {
  java.lang.Object ref = "";
  if (idOneofCase_ == 1) {
    ref = idOneof_;
  }
  if (ref instanceof String) {
    // Encode the cached String once and store the ByteString back into the
    // oneof slot (only if email is still the active member).
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    if (idOneofCase_ == 1) {
      idOneof_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The email to set.
* @return This builder for chaining.
*/
public Builder setEmail(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Selecting email implicitly clears any other id_oneof member.
  idOneofCase_ = 1;
  idOneof_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearEmail() {
  // Only clears the oneof when email is the currently active member.
  if (idOneofCase_ == 1) {
    idOneofCase_ = 0;
    idOneof_ = null;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Optional. An email address.
* </pre>
*
* <code>string email = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for email to set.
* @return This builder for chaining.
*/
public Builder setEmailBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject byte sequences that are not valid UTF-8 up front.
  checkByteStringIsUtf8(value);
  idOneofCase_ = 1;
  idOneof_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the phoneNumber field is set.
*/
@java.lang.Override
public boolean hasPhoneNumber() {
  // phone_number is field 2 of id_oneof.
  return idOneofCase_ == 2;
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The phoneNumber.
*/
@java.lang.Override
public java.lang.String getPhoneNumber() {
  java.lang.Object ref = "";
  if (idOneofCase_ == 2) {
    ref = idOneof_;
  }
  if (!(ref instanceof java.lang.String)) {
    // Payload is a ByteString; decode once and cache the String back into
    // the oneof slot (only if phone_number is still the active member).
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (idOneofCase_ == 2) {
      idOneof_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for phoneNumber.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPhoneNumberBytes() {
  java.lang.Object ref = "";
  if (idOneofCase_ == 2) {
    ref = idOneof_;
  }
  if (ref instanceof String) {
    // Encode the cached String once and store the ByteString back into the
    // oneof slot (only if phone_number is still the active member).
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    if (idOneofCase_ == 2) {
      idOneof_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The phoneNumber to set.
* @return This builder for chaining.
*/
public Builder setPhoneNumber(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Selecting phone_number implicitly clears any other id_oneof member.
  idOneofCase_ = 2;
  idOneof_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPhoneNumber() {
  // Only clears the oneof when phone_number is the currently active member.
  if (idOneofCase_ == 2) {
    idOneofCase_ = 0;
    idOneof_ = null;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Optional. A phone number. Should use the E.164 format.
* </pre>
*
* <code>string phone_number = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for phoneNumber to set.
* @return This builder for chaining.
*/
public Builder setPhoneNumberBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject byte sequences that are not valid UTF-8 up front.
  checkByteStringIsUtf8(value);
  idOneofCase_ = 2;
  idOneof_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return Whether the username field is set.
*/
@java.lang.Override
public boolean hasUsername() {
  // username is field 3 of id_oneof.
  return idOneofCase_ == 3;
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The username.
*/
@java.lang.Override
public java.lang.String getUsername() {
  java.lang.Object ref = "";
  if (idOneofCase_ == 3) {
    ref = idOneof_;
  }
  if (!(ref instanceof java.lang.String)) {
    // Payload is a ByteString; decode once and cache the String back into
    // the oneof slot (only if username is still the active member).
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (idOneofCase_ == 3) {
      idOneof_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for username.
*/
@java.lang.Override
public com.google.protobuf.ByteString getUsernameBytes() {
  java.lang.Object ref = "";
  if (idOneofCase_ == 3) {
    ref = idOneof_;
  }
  if (ref instanceof String) {
    // Encode the cached String once and store the ByteString back into the
    // oneof slot (only if username is still the active member).
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    if (idOneofCase_ == 3) {
      idOneof_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The username to set.
* @return This builder for chaining.
*/
public Builder setUsername(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Selecting username implicitly clears any other id_oneof member.
  idOneofCase_ = 3;
  idOneof_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearUsername() {
  // Only clears the oneof when username is the currently active member.
  if (idOneofCase_ == 3) {
    idOneofCase_ = 0;
    idOneof_ = null;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Optional. A unique username, if different from all the other identifiers
* and `account_id` that are provided. Can be a unique login handle or
* display name for a user.
* </pre>
*
* <code>string username = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for username to set.
* @return This builder for chaining.
*/
public Builder setUsernameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject byte sequences that are not valid UTF-8 up front.
  checkByteStringIsUtf8(value);
  idOneofCase_ = 3;
  idOneof_ = value;
  onChanged();
  return this;
}
// Covariant-return delegations for unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1.UserId)
}
// @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1.UserId)
// Singleton default instance, shared by getDefaultInstance() and
// getDefaultInstanceForType().
private static final com.google.recaptchaenterprise.v1.UserId DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1.UserId();
}

public static com.google.recaptchaenterprise.v1.UserId getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Stream parser backing the parseFrom overloads; delegates to the builder's
// mergeFrom and attaches any partially-built message to thrown parse errors.
private static final com.google.protobuf.Parser<UserId> PARSER =
    new com.google.protobuf.AbstractParser<UserId>() {
      @java.lang.Override
      public UserId parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UserId> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UserId> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.recaptchaenterprise.v1.UserId getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,175 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/CreateRegionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1/regions.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1;
/**
*
*
* <pre>
* Request message for the `CreateRegion` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.CreateRegionRequest}
*/
public final class CreateRegionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.CreateRegionRequest)
CreateRegionRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Construct using CreateRegionRequest.newBuilder().
private CreateRegionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default instance state: empty parent and region_id; region message unset.
private CreateRegionRequest() {
  parent_ = "";
  regionId_ = "";
}

// Used by the protobuf runtime to create instances reflectively.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateRegionRequest();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.shopping.merchant.accounts.v1.RegionsProto
      .internal_static_google_shopping_merchant_accounts_v1_CreateRegionRequest_descriptor;
}

// Binds the descriptor table to the message/builder classes for reflective
// field access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.shopping.merchant.accounts.v1.RegionsProto
      .internal_static_google_shopping_merchant_accounts_v1_CreateRegionRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.shopping.merchant.accounts.v1.CreateRegionRequest.class,
          com.google.shopping.merchant.accounts.v1.CreateRegionRequest.Builder.class);
}
// Bit 0 tracks presence of the `region` message field.
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;

// Holds a String, or a ByteString before the first String access (lazy
// UTF-8 decode); volatile so the cached conversion publishes safely.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The account to create a region for.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the parsed ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The account to create a region for.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString for subsequent serialization.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int REGION_ID_FIELD_NUMBER = 2;

// Same String/ByteString lazy-conversion scheme as parent_.
@SuppressWarnings("serial")
private volatile java.lang.Object regionId_ = "";
/**
*
*
* <pre>
* Required. The identifier for the region, unique over all regions of the
* same account.
* </pre>
*
* <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The regionId.
*/
@java.lang.Override
public java.lang.String getRegionId() {
  java.lang.Object ref = regionId_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the parsed ByteString once and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    regionId_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The identifier for the region, unique over all regions of the
* same account.
* </pre>
*
* <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for regionId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRegionIdBytes() {
  java.lang.Object ref = regionId_;
  if (ref instanceof java.lang.String) {
    // Encode once and cache the ByteString for subsequent serialization.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    regionId_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int REGION_FIELD_NUMBER = 3;
// Message field; null until set. Presence is tracked via bitField0_ bit 0.
private com.google.shopping.merchant.accounts.v1.Region region_;
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the region field is set.
*/
@java.lang.Override
public boolean hasRegion() {
  // Presence bit 0 is set by the parser/builder when `region` was supplied.
  return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The region.
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.Region getRegion() {
  // Never returns null: falls back to the default Region when unset.
  return region_ == null
      ? com.google.shopping.merchant.accounts.v1.Region.getDefaultInstance()
      : region_;
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.RegionOrBuilder getRegionOrBuilder() {
  // Never returns null: falls back to the default Region when unset.
  return region_ == null
      ? com.google.shopping.merchant.accounts.v1.Region.getDefaultInstance()
      : region_;
}

// Memoized initialization state: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order; empty strings and an unset
// region are skipped (proto3 default-elision).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(regionId_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, regionId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(3, getRegion());
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized byte size; must mirror writeTo.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(regionId_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, regionId_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getRegion());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-wise equality; the region message is compared only when present on
// both sides, and unknown fields must match as well.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.shopping.merchant.accounts.v1.CreateRegionRequest)) {
    return super.equals(obj);
  }
  com.google.shopping.merchant.accounts.v1.CreateRegionRequest other =
      (com.google.shopping.merchant.accounts.v1.CreateRegionRequest) obj;
  if (!getParent().equals(other.getParent())) return false;
  if (!getRegionId().equals(other.getRegionId())) return false;
  if (hasRegion() != other.hasRegion()) return false;
  if (hasRegion()) {
    if (!getRegion().equals(other.getRegion())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash consistent with equals; seeds with the descriptor and mixes
// each field number with its value's hash.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  hash = (37 * hash) + REGION_ID_FIELD_NUMBER;
  hash = (53 * hash) + getRegionId().hashCode();
  if (hasRegion()) {
    hash = (37 * hash) + REGION_FIELD_NUMBER;
    hash = (53 * hash) + getRegion().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points; all delegate to PARSER, with the
// stream variants routed through GeneratedMessageV3's IOException-aware
// helpers.
public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message bytes.
public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Builder pre-populated from an existing message.
public static Builder newBuilder(
    com.google.shopping.merchant.accounts.v1.CreateRegionRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

// Fresh builder for the default instance; otherwise seeded from this message.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for the `CreateRegion` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.CreateRegionRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.CreateRegionRequest)
com.google.shopping.merchant.accounts.v1.CreateRegionRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.shopping.merchant.accounts.v1.RegionsProto
      .internal_static_google_shopping_merchant_accounts_v1_CreateRegionRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.shopping.merchant.accounts.v1.RegionsProto
      .internal_static_google_shopping_merchant_accounts_v1_CreateRegionRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.shopping.merchant.accounts.v1.CreateRegionRequest.class,
          com.google.shopping.merchant.accounts.v1.CreateRegionRequest.Builder.class);
}

// Construct using com.google.shopping.merchant.accounts.v1.CreateRegionRequest.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Eagerly creates the nested region field builder when the runtime requires
// field builders to always exist (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getRegionFieldBuilder();
  }
}
// Resets all fields to their defaults and disposes any nested region builder.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  parent_ = "";
  regionId_ = "";
  region_ = null;
  if (regionBuilder_ != null) {
    regionBuilder_.dispose();
    regionBuilder_ = null;
  }
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.shopping.merchant.accounts.v1.RegionsProto
      .internal_static_google_shopping_merchant_accounts_v1_CreateRegionRequest_descriptor;
}

@java.lang.Override
public com.google.shopping.merchant.accounts.v1.CreateRegionRequest
    getDefaultInstanceForType() {
  return com.google.shopping.merchant.accounts.v1.CreateRegionRequest.getDefaultInstance();
}
// Builds the message, throwing if uninitialized (never the case here since
// isInitialized() is constant true).
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.CreateRegionRequest build() {
  com.google.shopping.merchant.accounts.v1.CreateRegionRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.shopping.merchant.accounts.v1.CreateRegionRequest buildPartial() {
  com.google.shopping.merchant.accounts.v1.CreateRegionRequest result =
      new com.google.shopping.merchant.accounts.v1.CreateRegionRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies only the fields whose builder presence bits are set; translates the
// builder's bit 0x4 (region) into the message's presence bit 0x1.
private void buildPartial0(
    com.google.shopping.merchant.accounts.v1.CreateRegionRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.parent_ = parent_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.regionId_ = regionId_;
  }
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.region_ = regionBuilder_ == null ? region_ : regionBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  result.bitField0_ |= to_bitField0_;
}
// The overrides below delegate straight to GeneratedMessageV3.Builder; they
// exist so the return type is narrowed to this Builder for call chaining.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
/**
 * Merges {@code other} into this builder: typed {@code CreateRegionRequest}
 * messages go through the field-aware overload, anything else through the
 * reflective base merge.
 */
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (!(other instanceof com.google.shopping.merchant.accounts.v1.CreateRegionRequest)) {
    super.mergeFrom(other);
    return this;
  }
  return mergeFrom((com.google.shopping.merchant.accounts.v1.CreateRegionRequest) other);
}
// Field-aware merge: copies only fields set on `other` (non-empty strings,
// present region), then merges unknown fields.
public Builder mergeFrom(com.google.shopping.merchant.accounts.v1.CreateRegionRequest other) {
  if (other
      == com.google.shopping.merchant.accounts.v1.CreateRegionRequest.getDefaultInstance())
    return this;
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getRegionId().isEmpty()) {
    regionId_ = other.regionId_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (other.hasRegion()) {
    mergeRegion(other.getRegion());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

// No proto-level required fields, so the builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Streaming merge: reads tag/value pairs until end of input (tag 0) or an
// end-group tag. Wire tags 10/18 are the strings parent (1) and region_id
// (2); tag 26 is the region message (3); anything else is preserved as an
// unknown field.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            parent_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            regionId_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 26:
          {
            input.readMessage(getRegionFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The account to create a region for.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The account to create a region for.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The account to create a region for.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The account to create a region for.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The account to create a region for.
* Format: `accounts/{account}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object regionId_ = "";
/**
*
*
* <pre>
* Required. The identifier for the region, unique over all regions of the
* same account.
* </pre>
*
* <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The regionId.
*/
public java.lang.String getRegionId() {
java.lang.Object ref = regionId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
regionId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The identifier for the region, unique over all regions of the
* same account.
* </pre>
*
* <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for regionId.
*/
public com.google.protobuf.ByteString getRegionIdBytes() {
java.lang.Object ref = regionId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
regionId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The identifier for the region, unique over all regions of the
* same account.
* </pre>
*
* <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The regionId to set.
* @return This builder for chaining.
*/
public Builder setRegionId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
regionId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The identifier for the region, unique over all regions of the
* same account.
* </pre>
*
* <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearRegionId() {
regionId_ = getDefaultInstance().getRegionId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The identifier for the region, unique over all regions of the
* same account.
* </pre>
*
* <code>string region_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for regionId to set.
* @return This builder for chaining.
*/
public Builder setRegionIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
regionId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.shopping.merchant.accounts.v1.Region region_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1.Region,
com.google.shopping.merchant.accounts.v1.Region.Builder,
com.google.shopping.merchant.accounts.v1.RegionOrBuilder>
regionBuilder_;
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the region field is set.
*/
public boolean hasRegion() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The region.
*/
public com.google.shopping.merchant.accounts.v1.Region getRegion() {
if (regionBuilder_ == null) {
return region_ == null
? com.google.shopping.merchant.accounts.v1.Region.getDefaultInstance()
: region_;
} else {
return regionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRegion(com.google.shopping.merchant.accounts.v1.Region value) {
if (regionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
region_ = value;
} else {
regionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRegion(
com.google.shopping.merchant.accounts.v1.Region.Builder builderForValue) {
if (regionBuilder_ == null) {
region_ = builderForValue.build();
} else {
regionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
/**
 *
 *
 * <pre>
 * Required. The region to create.
 * </pre>
 *
 * <code>
 * .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeRegion(com.google.shopping.merchant.accounts.v1.Region value) {
  if (regionBuilder_ == null) {
    // Field-based path: merge into the existing value only if one is actually
    // set and non-default; otherwise just adopt `value`.
    if (((bitField0_ & 0x00000004) != 0)
        && region_ != null
        && region_ != com.google.shopping.merchant.accounts.v1.Region.getDefaultInstance()) {
      getRegionBuilder().mergeFrom(value);
    } else {
      region_ = value;
    }
  } else {
    // Builder-based path: delegate; the builder owns the current value.
    regionBuilder_.mergeFrom(value);
  }
  // NOTE(review): when the nested builder is in use, region_ is null here, so
  // the has-bit/onChanged below is skipped on that path — presumably handled
  // elsewhere by the builder machinery (standard protoc output).
  if (region_ != null) {
    bitField0_ |= 0x00000004;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRegion() {
  // Clear the has-bit, drop the value, and release the nested builder (if any)
  // so a later set/merge starts from scratch.
  bitField0_ = (bitField0_ & ~0x00000004);
  region_ = null;
  if (regionBuilder_ != null) {
    regionBuilder_.dispose();
    regionBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1.Region.Builder getRegionBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getRegionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1.RegionOrBuilder getRegionOrBuilder() {
if (regionBuilder_ != null) {
return regionBuilder_.getMessageOrBuilder();
} else {
return region_ == null
? com.google.shopping.merchant.accounts.v1.Region.getDefaultInstance()
: region_;
}
}
/**
*
*
* <pre>
* Required. The region to create.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.Region region = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.shopping.merchant.accounts.v1.Region,
        com.google.shopping.merchant.accounts.v1.Region.Builder,
        com.google.shopping.merchant.accounts.v1.RegionOrBuilder>
    getRegionFieldBuilder() {
  // Lazily create the single-field builder; once created it becomes the sole
  // owner of the value, so the plain field is nulled to avoid two sources of truth.
  if (regionBuilder_ == null) {
    regionBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.shopping.merchant.accounts.v1.Region,
            com.google.shopping.merchant.accounts.v1.Region.Builder,
            com.google.shopping.merchant.accounts.v1.RegionOrBuilder>(
            getRegion(), getParentForChildren(), isClean());
    region_ = null;
  }
  return regionBuilder_;
}
// Unknown-field handling is delegated verbatim to the generated base class.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.CreateRegionRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.CreateRegionRequest)
// Shared immutable default instance; also serves as the identity for merges.
private static final com.google.shopping.merchant.accounts.v1.CreateRegionRequest
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.CreateRegionRequest();
}
public static com.google.shopping.merchant.accounts.v1.CreateRegionRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateRegionRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateRegionRequest>() {
      @java.lang.Override
      public CreateRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        // Parse via a fresh builder; on any failure, attach the partially
        // built message to the exception so callers can inspect what parsed.
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O errors in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors for the shared parser and default instance.
public static com.google.protobuf.Parser<CreateRegionRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateRegionRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.CreateRegionRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== End of generated file: CreateRegionRequest.java ====
// ==== Begin generated file: googleapis/google-cloud-java | 36,177 |
// java-appengine-admin/proto-google-cloud-appengine-admin-v1/src/main/java/com/google/appengine/v1/UpdateIngressRuleRequest.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/appengine/v1/appengine.proto
// Protobuf Java Version: 3.25.8
package com.google.appengine.v1;
/**
*
*
* <pre>
* Request message for `Firewall.UpdateIngressRule`.
* </pre>
*
* Protobuf type {@code google.appengine.v1.UpdateIngressRuleRequest}
*/
public final class UpdateIngressRuleRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.appengine.v1.UpdateIngressRuleRequest)
UpdateIngressRuleRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateIngressRuleRequest.newBuilder() to construct.
private UpdateIngressRuleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateIngressRuleRequest() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateIngressRuleRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.appengine.v1.AppengineProto
.internal_static_google_appengine_v1_UpdateIngressRuleRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.appengine.v1.AppengineProto
.internal_static_google_appengine_v1_UpdateIngressRuleRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.appengine.v1.UpdateIngressRuleRequest.class,
com.google.appengine.v1.UpdateIngressRuleRequest.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Name of the Firewall resource to update.
* Example: `apps/myapp/firewall/ingressRules/100`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
  // name_ is stored as either String or ByteString; decode lazily and cache
  // the decoded String back into the field for subsequent calls.
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    name_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Name of the Firewall resource to update.
* Example: `apps/myapp/firewall/ingressRules/100`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  // Mirror of getName(): lazily encode a cached String to ByteString and
  // store the encoded form back for reuse.
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int RULE_FIELD_NUMBER = 2;
private com.google.appengine.v1.firewall.FirewallRule rule_;
/**
*
*
* <pre>
* A FirewallRule containing the updated resource
* </pre>
*
* <code>.google.appengine.v1.FirewallRule rule = 2;</code>
*
* @return Whether the rule field is set.
*/
@java.lang.Override
public boolean hasRule() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* A FirewallRule containing the updated resource
* </pre>
*
* <code>.google.appengine.v1.FirewallRule rule = 2;</code>
*
* @return The rule.
*/
@java.lang.Override
public com.google.appengine.v1.firewall.FirewallRule getRule() {
return rule_ == null
? com.google.appengine.v1.firewall.FirewallRule.getDefaultInstance()
: rule_;
}
/**
*
*
* <pre>
* A FirewallRule containing the updated resource
* </pre>
*
* <code>.google.appengine.v1.FirewallRule rule = 2;</code>
*/
@java.lang.Override
public com.google.appengine.v1.firewall.FirewallRuleOrBuilder getRuleOrBuilder() {
return rule_ == null
? com.google.appengine.v1.firewall.FirewallRule.getDefaultInstance()
: rule_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 3;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Standard field mask for the set of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Standard field mask for the set of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Standard field mask for the set of fields to be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized tri-state: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in proto3, so the answer is always true once computed.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 semantics: the string is serialized only when non-empty; message
  // fields only when their has-bit is set. Unknown fields are round-tripped.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getRule());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(3, getUpdateMask());
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized; -1 means "not yet computed". Must mirror writeTo() exactly,
  // field for field, or serialization will be corrupted.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getRule());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.appengine.v1.UpdateIngressRuleRequest)) {
    return super.equals(obj);
  }
  com.google.appengine.v1.UpdateIngressRuleRequest other =
      (com.google.appengine.v1.UpdateIngressRuleRequest) obj;
  // Field-wise equality: message fields compare presence first, then value;
  // unknown fields also participate so byte-identical messages compare equal.
  if (!getName().equals(other.getName())) return false;
  if (hasRule() != other.hasRule()) return false;
  if (hasRule()) {
    if (!getRule().equals(other.getRule())) return false;
  }
  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask().equals(other.getUpdateMask())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized (0 means "not yet computed"); must stay consistent with equals():
  // present fields are mixed in with their field numbers as salts.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  if (hasRule()) {
    hash = (37 * hash) + RULE_FIELD_NUMBER;
    hash = (53 * hash) + getRule().hashCode();
  }
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.appengine.v1.UpdateIngressRuleRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.appengine.v1.UpdateIngressRuleRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for `Firewall.UpdateIngressRule`.
* </pre>
*
* Protobuf type {@code google.appengine.v1.UpdateIngressRuleRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.appengine.v1.UpdateIngressRuleRequest)
com.google.appengine.v1.UpdateIngressRuleRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.appengine.v1.AppengineProto
.internal_static_google_appengine_v1_UpdateIngressRuleRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.appengine.v1.AppengineProto
.internal_static_google_appengine_v1_UpdateIngressRuleRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.appengine.v1.UpdateIngressRuleRequest.class,
com.google.appengine.v1.UpdateIngressRuleRequest.Builder.class);
}
// Construct using com.google.appengine.v1.UpdateIngressRuleRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
  // When the runtime always uses field builders, eagerly create the nested
  // builders so change notifications propagate from the start.
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getRuleFieldBuilder();
    getUpdateMaskFieldBuilder();
  }
}
@java.lang.Override
public Builder clear() {
  super.clear();
  // Reset all has-bits, scalar defaults, and dispose any nested builders so
  // the Builder is indistinguishable from a freshly created one.
  bitField0_ = 0;
  name_ = "";
  rule_ = null;
  if (ruleBuilder_ != null) {
    ruleBuilder_.dispose();
    ruleBuilder_ = null;
  }
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.appengine.v1.AppengineProto
.internal_static_google_appengine_v1_UpdateIngressRuleRequest_descriptor;
}
@java.lang.Override
public com.google.appengine.v1.UpdateIngressRuleRequest getDefaultInstanceForType() {
return com.google.appengine.v1.UpdateIngressRuleRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.appengine.v1.UpdateIngressRuleRequest build() {
  // Same as buildPartial(), but rejects uninitialized results (a no-op check
  // here since proto3 messages have no required fields).
  com.google.appengine.v1.UpdateIngressRuleRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.appengine.v1.UpdateIngressRuleRequest buildPartial() {
  com.google.appengine.v1.UpdateIngressRuleRequest result =
      new com.google.appengine.v1.UpdateIngressRuleRequest(this);
  // Only copy fields if at least one has-bit is set; otherwise the freshly
  // constructed message already holds all defaults.
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
private void buildPartial0(com.google.appengine.v1.UpdateIngressRuleRequest result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.name_ = name_;
  }
  // Builder bits and message bits differ: builder bit 0x2/0x4 (rule/updateMask)
  // map to message has-bits 0x1/0x2, since the message has no bit for `name`.
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    // Prefer the nested builder's current state over the cached field value.
    result.rule_ = ruleBuilder_ == null ? rule_ : ruleBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
// The following overrides delegate to the reflective implementations in the
// generated base class; they exist only to narrow the return type to Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Type-specific merge when possible; reflective merge otherwise.
  if (other instanceof com.google.appengine.v1.UpdateIngressRuleRequest) {
    return mergeFrom((com.google.appengine.v1.UpdateIngressRuleRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
public Builder mergeFrom(com.google.appengine.v1.UpdateIngressRuleRequest other) {
  // Merging the default instance is a no-op.
  if (other == com.google.appengine.v1.UpdateIngressRuleRequest.getDefaultInstance())
    return this;
  // Proto3 string field: only a non-empty value from `other` overwrites ours.
  if (!other.getName().isEmpty()) {
    name_ = other.name_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  // Sub-message fields are merged recursively, not replaced.
  if (other.hasRule()) {
    mergeRule(other.getRule());
  }
  if (other.hasUpdateMask()) {
    mergeUpdateMask(other.getUpdateMask());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // Proto3 message with no required fields: always initialized.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    // Wire-format parse loop: tags 10/18/26 are length-delimited fields 1–3
    // (name, rule, update_mask); tag 0 signals end of input.
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            name_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // Parse directly into the nested builder so repeated occurrences merge.
            input.readMessage(getRuleFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 26:
          {
            input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            // Unknown fields are preserved for round-tripping.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure, since fields may have been set.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Name of the Firewall resource to update.
* Example: `apps/myapp/firewall/ingressRules/100`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the Firewall resource to update.
* Example: `apps/myapp/firewall/ingressRules/100`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the Firewall resource to update.
* Example: `apps/myapp/firewall/ingressRules/100`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the Firewall resource to update.
* Example: `apps/myapp/firewall/ingressRules/100`.
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the Firewall resource to update.
* Example: `apps/myapp/firewall/ingressRules/100`.
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
    // protoc-generated state for the `rule` message field (tag 2).
    // Exactly one of rule_ / ruleBuilder_ is authoritative at a time: rule_
    // until a nested builder is requested, ruleBuilder_ afterwards.
    private com.google.appengine.v1.firewall.FirewallRule rule_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.appengine.v1.firewall.FirewallRule,
            com.google.appengine.v1.firewall.FirewallRule.Builder,
            com.google.appengine.v1.firewall.FirewallRuleOrBuilder>
        ruleBuilder_;
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     *
     * @return Whether the rule field is set.
     */
    public boolean hasRule() {
      // Bit 0x00000002 of bitField0_ tracks presence of `rule`.
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     *
     * @return The rule.
     */
    public com.google.appengine.v1.firewall.FirewallRule getRule() {
      if (ruleBuilder_ == null) {
        return rule_ == null
            ? com.google.appengine.v1.firewall.FirewallRule.getDefaultInstance()
            : rule_;
      } else {
        return ruleBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     */
    public Builder setRule(com.google.appengine.v1.firewall.FirewallRule value) {
      if (ruleBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        rule_ = value;
      } else {
        ruleBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     */
    public Builder setRule(com.google.appengine.v1.firewall.FirewallRule.Builder builderForValue) {
      if (ruleBuilder_ == null) {
        rule_ = builderForValue.build();
      } else {
        ruleBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     */
    public Builder mergeRule(com.google.appengine.v1.firewall.FirewallRule value) {
      if (ruleBuilder_ == null) {
        // Merge field-by-field only when a non-default rule already exists;
        // otherwise adopt the incoming message wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && rule_ != null
            && rule_ != com.google.appengine.v1.firewall.FirewallRule.getDefaultInstance()) {
          getRuleBuilder().mergeFrom(value);
        } else {
          rule_ = value;
        }
      } else {
        ruleBuilder_.mergeFrom(value);
      }
      if (rule_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     */
    public Builder clearRule() {
      bitField0_ = (bitField0_ & ~0x00000002);
      rule_ = null;
      if (ruleBuilder_ != null) {
        ruleBuilder_.dispose();
        ruleBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     */
    public com.google.appengine.v1.firewall.FirewallRule.Builder getRuleBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getRuleFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     */
    public com.google.appengine.v1.firewall.FirewallRuleOrBuilder getRuleOrBuilder() {
      if (ruleBuilder_ != null) {
        return ruleBuilder_.getMessageOrBuilder();
      } else {
        return rule_ == null
            ? com.google.appengine.v1.firewall.FirewallRule.getDefaultInstance()
            : rule_;
      }
    }
    /**
     *
     *
     * <pre>
     * A FirewallRule containing the updated resource
     * </pre>
     *
     * <code>.google.appengine.v1.FirewallRule rule = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.appengine.v1.firewall.FirewallRule,
            com.google.appengine.v1.firewall.FirewallRule.Builder,
            com.google.appengine.v1.firewall.FirewallRuleOrBuilder>
        getRuleFieldBuilder() {
      // Lazily promote the plain message into a nested field builder; after
      // promotion, rule_ is nulled and ruleBuilder_ owns the value.
      if (ruleBuilder_ == null) {
        ruleBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.appengine.v1.firewall.FirewallRule,
                com.google.appengine.v1.firewall.FirewallRule.Builder,
                com.google.appengine.v1.firewall.FirewallRuleOrBuilder>(
                getRule(), getParentForChildren(), isClean());
        rule_ = null;
      }
      return ruleBuilder_;
    }
    // protoc-generated state for the `update_mask` message field (tag 3);
    // mirrors the rule_/ruleBuilder_ pattern (plain message until a nested
    // builder is requested).
    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      // Bit 0x00000004 of bitField0_ tracks presence of `update_mask`.
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge into an existing non-default mask; otherwise adopt `value`.
        if (((bitField0_ & 0x00000004) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000004);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000004;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Standard field mask for the set of fields to be updated.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily promote the plain message into a nested field builder.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    // Final pass-throughs to the generated superclass so subclasses of this
    // Builder cannot alter how unknown (unrecognized-tag) fields are retained.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.appengine.v1.UpdateIngressRuleRequest)
}
// @@protoc_insertion_point(class_scope:google.appengine.v1.UpdateIngressRuleRequest)
  // Shared immutable default (all-fields-unset) instance; protobuf uses this
  // as the identity value for the message type.
  private static final com.google.appengine.v1.UpdateIngressRuleRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.appengine.v1.UpdateIngressRuleRequest();
  }
  public static com.google.appengine.v1.UpdateIngressRuleRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<UpdateIngressRuleRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateIngressRuleRequest>() {
        @java.lang.Override
        public UpdateIngressRuleRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the
            // partial message on failure.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateIngressRuleRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateIngressRuleRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.appengine.v1.UpdateIngressRuleRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
/**
*
*
* <pre>
* AdditionalPodNetworkConfig is the configuration for additional pod networks
* within the NodeNetworkConfig message
* </pre>
*
* Protobuf type {@code google.container.v1beta1.AdditionalPodNetworkConfig}
*/
public final class AdditionalPodNetworkConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1beta1.AdditionalPodNetworkConfig)
AdditionalPodNetworkConfigOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use AdditionalPodNetworkConfig.newBuilder() to construct.
  private AdditionalPodNetworkConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor: string fields start as "" (proto3 default).
  private AdditionalPodNetworkConfig() {
    subnetwork_ = "";
    secondaryPodRange_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AdditionalPodNetworkConfig();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_AdditionalPodNetworkConfig_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_AdditionalPodNetworkConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1beta1.AdditionalPodNetworkConfig.class,
            com.google.container.v1beta1.AdditionalPodNetworkConfig.Builder.class);
  }
  // Bit 0x00000001 tracks presence of the optional max_pods_per_node field.
  private int bitField0_;
  public static final int SUBNETWORK_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object subnetwork_ = "";
  /**
   *
   *
   * <pre>
   * Name of the subnetwork where the additional pod network belongs.
   * </pre>
   *
   * <code>string subnetwork = 1;</code>
   *
   * @return The subnetwork.
   */
  @java.lang.Override
  public java.lang.String getSubnetwork() {
    java.lang.Object ref = subnetwork_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls skip the UTF-8 conversion.
      subnetwork_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of the subnetwork where the additional pod network belongs.
   * </pre>
   *
   * <code>string subnetwork = 1;</code>
   *
   * @return The bytes for subnetwork.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSubnetworkBytes() {
    java.lang.Object ref = subnetwork_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      subnetwork_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SECONDARY_POD_RANGE_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private volatile java.lang.Object secondaryPodRange_ = "";
  /**
   *
   *
   * <pre>
   * The name of the secondary range on the subnet which provides IP address for
   * this pod range.
   * </pre>
   *
   * <code>string secondary_pod_range = 2;</code>
   *
   * @return The secondaryPodRange.
   */
  @java.lang.Override
  public java.lang.String getSecondaryPodRange() {
    java.lang.Object ref = secondaryPodRange_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      secondaryPodRange_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The name of the secondary range on the subnet which provides IP address for
   * this pod range.
   * </pre>
   *
   * <code>string secondary_pod_range = 2;</code>
   *
   * @return The bytes for secondaryPodRange.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSecondaryPodRangeBytes() {
    java.lang.Object ref = secondaryPodRange_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      secondaryPodRange_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int MAX_PODS_PER_NODE_FIELD_NUMBER = 3;
  private com.google.container.v1beta1.MaxPodsConstraint maxPodsPerNode_;
  /**
   *
   *
   * <pre>
   * The maximum number of pods per node which use this pod network.
   * </pre>
   *
   * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
   *
   * @return Whether the maxPodsPerNode field is set.
   */
  @java.lang.Override
  public boolean hasMaxPodsPerNode() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * The maximum number of pods per node which use this pod network.
   * </pre>
   *
   * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
   *
   * @return The maxPodsPerNode.
   */
  @java.lang.Override
  public com.google.container.v1beta1.MaxPodsConstraint getMaxPodsPerNode() {
    return maxPodsPerNode_ == null
        ? com.google.container.v1beta1.MaxPodsConstraint.getDefaultInstance()
        : maxPodsPerNode_;
  }
  /**
   *
   *
   * <pre>
   * The maximum number of pods per node which use this pod network.
   * </pre>
   *
   * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
   */
  @java.lang.Override
  public com.google.container.v1beta1.MaxPodsConstraintOrBuilder getMaxPodsPerNodeOrBuilder() {
    return maxPodsPerNode_ == null
        ? com.google.container.v1beta1.MaxPodsConstraint.getDefaultInstance()
        : maxPodsPerNode_;
  }
  // Memoized initialization check: -1 = not computed, 1 = initialized,
  // 0 = not initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Wire serialization: proto3 strings are emitted only when non-empty,
  // and the optional message only when its presence bit is set.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnetwork_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, subnetwork_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secondaryPodRange_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, secondaryPodRange_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getMaxPodsPerNode());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize of -1 means "not computed yet"; the size mirrors writeTo.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subnetwork_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, subnetwork_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(secondaryPodRange_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, secondaryPodRange_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getMaxPodsPerNode());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all declared fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.container.v1beta1.AdditionalPodNetworkConfig)) {
      return super.equals(obj);
    }
    com.google.container.v1beta1.AdditionalPodNetworkConfig other =
        (com.google.container.v1beta1.AdditionalPodNetworkConfig) obj;
    if (!getSubnetwork().equals(other.getSubnetwork())) return false;
    if (!getSecondaryPodRange().equals(other.getSecondaryPodRange())) return false;
    if (hasMaxPodsPerNode() != other.hasMaxPodsPerNode()) return false;
    if (hasMaxPodsPerNode()) {
      if (!getMaxPodsPerNode().equals(other.getMaxPodsPerNode())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not computed") and consistent with equals.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SUBNETWORK_FIELD_NUMBER;
    hash = (53 * hash) + getSubnetwork().hashCode();
    hash = (37 * hash) + SECONDARY_POD_RANGE_FIELD_NUMBER;
    hash = (53 * hash) + getSecondaryPodRange().hashCode();
    if (hasMaxPodsPerNode()) {
      hash = (37 * hash) + MAX_PODS_PER_NODE_FIELD_NUMBER;
      hash = (53 * hash) + getMaxPodsPerNode().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protoc-generated parse entry points, one per input source type;
  // all delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.container.v1beta1.AdditionalPodNetworkConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: a fresh builder, a builder pre-loaded from a prototype,
  // and toBuilder() which avoids a copy for the shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.container.v1beta1.AdditionalPodNetworkConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* AdditionalPodNetworkConfig is the configuration for additional pod networks
* within the NodeNetworkConfig message
* </pre>
*
* Protobuf type {@code google.container.v1beta1.AdditionalPodNetworkConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.container.v1beta1.AdditionalPodNetworkConfig)
com.google.container.v1beta1.AdditionalPodNetworkConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_AdditionalPodNetworkConfig_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_AdditionalPodNetworkConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.container.v1beta1.AdditionalPodNetworkConfig.class,
              com.google.container.v1beta1.AdditionalPodNetworkConfig.Builder.class);
    }
    // Construct using com.google.container.v1beta1.AdditionalPodNetworkConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders only when the runtime is
      // configured to always use field builders.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getMaxPodsPerNodeFieldBuilder();
      }
    }
    // Resets all fields to proto defaults and clears the presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      subnetwork_ = "";
      secondaryPodRange_ = "";
      maxPodsPerNode_ = null;
      if (maxPodsPerNodeBuilder_ != null) {
        maxPodsPerNodeBuilder_.dispose();
        maxPodsPerNodeBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.container.v1beta1.ClusterServiceProto
          .internal_static_google_container_v1beta1_AdditionalPodNetworkConfig_descriptor;
    }
    @java.lang.Override
    public com.google.container.v1beta1.AdditionalPodNetworkConfig getDefaultInstanceForType() {
      return com.google.container.v1beta1.AdditionalPodNetworkConfig.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.container.v1beta1.AdditionalPodNetworkConfig build() {
      com.google.container.v1beta1.AdditionalPodNetworkConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.container.v1beta1.AdditionalPodNetworkConfig buildPartial() {
      com.google.container.v1beta1.AdditionalPodNetworkConfig result =
          new com.google.container.v1beta1.AdditionalPodNetworkConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields from the builder into the result message and
    // translates builder presence bits into the message's bitField0_.
    private void buildPartial0(com.google.container.v1beta1.AdditionalPodNetworkConfig result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.subnetwork_ = subnetwork_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.secondaryPodRange_ = secondaryPodRange_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.maxPodsPerNode_ =
            maxPodsPerNodeBuilder_ == null ? maxPodsPerNode_ : maxPodsPerNodeBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic merge: dispatch to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.container.v1beta1.AdditionalPodNetworkConfig) {
        return mergeFrom((com.google.container.v1beta1.AdditionalPodNetworkConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: non-empty strings and set message fields in `other`
    // overwrite/merge into this builder.
    public Builder mergeFrom(com.google.container.v1beta1.AdditionalPodNetworkConfig other) {
      if (other == com.google.container.v1beta1.AdditionalPodNetworkConfig.getDefaultInstance())
        return this;
      if (!other.getSubnetwork().isEmpty()) {
        subnetwork_ = other.subnetwork_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getSecondaryPodRange().isEmpty()) {
        secondaryPodRange_ = other.secondaryPodRange_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasMaxPodsPerNode()) {
        mergeMaxPodsPerNode(other.getMaxPodsPerNode());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: reads tag/value pairs until EOF (tag 0) or an
    // end-group tag; unrecognized fields are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                subnetwork_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                secondaryPodRange_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getMaxPodsPerNodeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object subnetwork_ = "";
/**
*
*
* <pre>
* Name of the subnetwork where the additional pod network belongs.
* </pre>
*
* <code>string subnetwork = 1;</code>
*
* @return The subnetwork.
*/
public java.lang.String getSubnetwork() {
java.lang.Object ref = subnetwork_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
subnetwork_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the subnetwork where the additional pod network belongs.
* </pre>
*
* <code>string subnetwork = 1;</code>
*
* @return The bytes for subnetwork.
*/
public com.google.protobuf.ByteString getSubnetworkBytes() {
java.lang.Object ref = subnetwork_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
subnetwork_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the subnetwork where the additional pod network belongs.
* </pre>
*
* <code>string subnetwork = 1;</code>
*
* @param value The subnetwork to set.
* @return This builder for chaining.
*/
public Builder setSubnetwork(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
subnetwork_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the subnetwork where the additional pod network belongs.
* </pre>
*
* <code>string subnetwork = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearSubnetwork() {
subnetwork_ = getDefaultInstance().getSubnetwork();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the subnetwork where the additional pod network belongs.
* </pre>
*
* <code>string subnetwork = 1;</code>
*
* @param value The bytes for subnetwork to set.
* @return This builder for chaining.
*/
public Builder setSubnetworkBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
subnetwork_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Holds either a String or a ByteString; decoded lazily by the accessors below.
private java.lang.Object secondaryPodRange_ = "";
/**
 *
 *
 * <pre>
 * The name of the secondary range on the subnet which provides IP address for
 * this pod range.
 * </pre>
 *
 * <code>string secondary_pod_range = 2;</code>
 *
 * @return The secondaryPodRange.
 */
public java.lang.String getSecondaryPodRange() {
  java.lang.Object ref = secondaryPodRange_;
  // Decode the ByteString form to a String on first access and memoize it.
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    secondaryPodRange_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * The name of the secondary range on the subnet which provides IP address for
 * this pod range.
 * </pre>
 *
 * <code>string secondary_pod_range = 2;</code>
 *
 * @return The bytes for secondaryPodRange.
 */
public com.google.protobuf.ByteString getSecondaryPodRangeBytes() {
  java.lang.Object ref = secondaryPodRange_;
  // Convert the cached String form to ByteString lazily and memoize it.
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    secondaryPodRange_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * The name of the secondary range on the subnet which provides IP address for
 * this pod range.
 * </pre>
 *
 * <code>string secondary_pod_range = 2;</code>
 *
 * @param value The secondaryPodRange to set.
 * @return This builder for chaining.
 */
public Builder setSecondaryPodRange(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  secondaryPodRange_ = value;
  // Bit 1 (0x2) of the has-bits marks this field as explicitly set.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The name of the secondary range on the subnet which provides IP address for
 * this pod range.
 * </pre>
 *
 * <code>string secondary_pod_range = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearSecondaryPodRange() {
  // Restore the proto default value and clear the corresponding has-bit.
  secondaryPodRange_ = getDefaultInstance().getSecondaryPodRange();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The name of the secondary range on the subnet which provides IP address for
 * this pod range.
 * </pre>
 *
 * <code>string secondary_pod_range = 2;</code>
 *
 * @param value The bytes for secondaryPodRange to set.
 * @return This builder for chaining.
 */
public Builder setSecondaryPodRangeBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 string fields must carry valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);
  secondaryPodRange_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
private com.google.container.v1beta1.MaxPodsConstraint maxPodsPerNode_;
// Lazily created by getMaxPodsPerNodeFieldBuilder(); once present, it owns the
// field value and maxPodsPerNode_ is set to null.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.container.v1beta1.MaxPodsConstraint,
        com.google.container.v1beta1.MaxPodsConstraint.Builder,
        com.google.container.v1beta1.MaxPodsConstraintOrBuilder>
    maxPodsPerNodeBuilder_;
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @return Whether the maxPodsPerNode field is set.
 */
public boolean hasMaxPodsPerNode() {
  // Bit 2 (0x4) of the has-bits tracks presence of this optional message field.
  return ((bitField0_ & 0x00000004) != 0);
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @return The maxPodsPerNode.
 */
public com.google.container.v1beta1.MaxPodsConstraint getMaxPodsPerNode() {
  // If a nested builder exists it owns the current value; otherwise fall back to
  // the stored message, substituting the default instance for null.
  if (maxPodsPerNodeBuilder_ == null) {
    return maxPodsPerNode_ == null
        ? com.google.container.v1beta1.MaxPodsConstraint.getDefaultInstance()
        : maxPodsPerNode_;
  } else {
    return maxPodsPerNodeBuilder_.getMessage();
  }
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @param value The maxPodsPerNode to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setMaxPodsPerNode(com.google.container.v1beta1.MaxPodsConstraint value) {
  // Route the assignment through the nested builder when one exists.
  if (maxPodsPerNodeBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    maxPodsPerNode_ = value;
  } else {
    maxPodsPerNodeBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @param builderForValue builder whose built message becomes the new field value.
 * @return This builder for chaining.
 */
public Builder setMaxPodsPerNode(
    com.google.container.v1beta1.MaxPodsConstraint.Builder builderForValue) {
  // Build the message once and store it directly or via the nested builder.
  if (maxPodsPerNodeBuilder_ == null) {
    maxPodsPerNode_ = builderForValue.build();
  } else {
    maxPodsPerNodeBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @param value The message to merge into the current field value.
 * @return This builder for chaining.
 */
public Builder mergeMaxPodsPerNode(com.google.container.v1beta1.MaxPodsConstraint value) {
  if (maxPodsPerNodeBuilder_ == null) {
    // Merge field-by-field only when the field is already set to a non-default
    // value (reference comparison against the shared default instance);
    // otherwise simply replace it.
    if (((bitField0_ & 0x00000004) != 0)
        && maxPodsPerNode_ != null
        && maxPodsPerNode_
            != com.google.container.v1beta1.MaxPodsConstraint.getDefaultInstance()) {
      getMaxPodsPerNodeBuilder().mergeFrom(value);
    } else {
      maxPodsPerNode_ = value;
    }
  } else {
    maxPodsPerNodeBuilder_.mergeFrom(value);
  }
  // Only mark the field present when a value actually ended up stored.
  if (maxPodsPerNode_ != null) {
    bitField0_ |= 0x00000004;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearMaxPodsPerNode() {
  // Clear the has-bit, drop the stored value, and release the nested builder.
  bitField0_ = (bitField0_ & ~0x00000004);
  maxPodsPerNode_ = null;
  if (maxPodsPerNodeBuilder_ != null) {
    maxPodsPerNodeBuilder_.dispose();
    maxPodsPerNodeBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @return a builder for mutating the field in place; marks the field as set.
 */
public com.google.container.v1beta1.MaxPodsConstraint.Builder getMaxPodsPerNodeBuilder() {
  // Obtaining the builder implies intent to populate the field, so set the has-bit.
  bitField0_ |= 0x00000004;
  onChanged();
  return getMaxPodsPerNodeFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 *
 * @return a read-only view of the field without forcing builder creation.
 */
public com.google.container.v1beta1.MaxPodsConstraintOrBuilder getMaxPodsPerNodeOrBuilder() {
  if (maxPodsPerNodeBuilder_ != null) {
    return maxPodsPerNodeBuilder_.getMessageOrBuilder();
  } else {
    return maxPodsPerNode_ == null
        ? com.google.container.v1beta1.MaxPodsConstraint.getDefaultInstance()
        : maxPodsPerNode_;
  }
}
/**
 *
 *
 * <pre>
 * The maximum number of pods per node which use this pod network.
 * </pre>
 *
 * <code>optional .google.container.v1beta1.MaxPodsConstraint max_pods_per_node = 3;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.container.v1beta1.MaxPodsConstraint,
        com.google.container.v1beta1.MaxPodsConstraint.Builder,
        com.google.container.v1beta1.MaxPodsConstraintOrBuilder>
    getMaxPodsPerNodeFieldBuilder() {
  // Lazily create the nested field builder; ownership of the current value
  // transfers to it, so the plain field reference is nulled afterwards.
  if (maxPodsPerNodeBuilder_ == null) {
    maxPodsPerNodeBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.container.v1beta1.MaxPodsConstraint,
            com.google.container.v1beta1.MaxPodsConstraint.Builder,
            com.google.container.v1beta1.MaxPodsConstraintOrBuilder>(
            getMaxPodsPerNode(), getParentForChildren(), isClean());
    maxPodsPerNode_ = null;
  }
  return maxPodsPerNodeBuilder_;
}
// Delegates unknown-field handling to the generated superclass implementation.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Delegates unknown-field merging to the generated superclass implementation.
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.container.v1beta1.AdditionalPodNetworkConfig)
}
// @@protoc_insertion_point(class_scope:google.container.v1beta1.AdditionalPodNetworkConfig)
// Shared immutable default instance; also used as the identity for merge checks.
private static final com.google.container.v1beta1.AdditionalPodNetworkConfig DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.container.v1beta1.AdditionalPodNetworkConfig();
}
// Returns the shared default (all-fields-unset) instance of this message type.
public static com.google.container.v1beta1.AdditionalPodNetworkConfig getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. All parse failures are surfaced as
// InvalidProtocolBufferException, each carrying the partially built message.
private static final com.google.protobuf.Parser<AdditionalPodNetworkConfig> PARSER =
    new com.google.protobuf.AbstractParser<AdditionalPodNetworkConfig>() {
      @java.lang.Override
      public AdditionalPodNetworkConfig parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // I/O problems while reading the stream are reported as parse failures.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the singleton wire-format parser.
public static com.google.protobuf.Parser<AdditionalPodNetworkConfig> parser() {
  return PARSER;
}
// Instance accessor for the singleton wire-format parser.
@java.lang.Override
public com.google.protobuf.Parser<AdditionalPodNetworkConfig> getParserForType() {
  return PARSER;
}
// Instance accessor for the shared default instance.
@java.lang.Override
public com.google.container.v1beta1.AdditionalPodNetworkConfig getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.controller.service;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.controller.ComponentNode;
import org.apache.nifi.controller.ControllerService;
import org.apache.nifi.controller.FlowAnalysisRuleNode;
import org.apache.nifi.controller.ParameterProviderNode;
import org.apache.nifi.controller.ProcessScheduler;
import org.apache.nifi.controller.ProcessorNode;
import org.apache.nifi.controller.ReportingTaskNode;
import org.apache.nifi.controller.ScheduledState;
import org.apache.nifi.controller.flow.FlowManager;
import org.apache.nifi.events.BulletinFactory;
import org.apache.nifi.groups.ComponentScheduler;
import org.apache.nifi.groups.DefaultComponentScheduler;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.logging.LogRepositoryFactory;
import org.apache.nifi.nar.ExtensionDefinition;
import org.apache.nifi.nar.ExtensionManager;
import org.apache.nifi.registry.flow.FlowRegistryClientNode;
import org.apache.nifi.registry.flow.mapping.VersionedComponentStateLookup;
import org.apache.nifi.reporting.BulletinRepository;
import org.apache.nifi.reporting.Severity;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.function.BooleanSupplier;
import java.util.stream.Collectors;
import static java.util.Objects.requireNonNull;
public class StandardControllerServiceProvider implements ControllerServiceProvider {
private static final Logger logger = LoggerFactory.getLogger(StandardControllerServiceProvider.class);
// Schedules enable/disable of services and start/stop of referencing components.
private final ProcessScheduler processScheduler;
// Destination for user-visible error bulletins; guarded with null checks before use.
private final BulletinRepository bulletinRepo;
// Source of process groups, root-level services, and component lookups.
private final FlowManager flowManager;
private final ExtensionManager extensionManager;
// Lookup cache of all non-root Controller Service nodes, keyed by identifier.
private final ConcurrentMap<String, ControllerServiceNode> serviceCache = new ConcurrentHashMap<>();
/**
 * Creates a provider that resolves Controller Service nodes via the given {@link FlowManager}
 * and a local cache of service nodes.
 *
 * @param scheduler scheduler used to enable/disable services and schedule referencing components; required
 * @param bulletinRepo repository used to report enable/disable failures as bulletins; may be null
 *        (every use site in this class is null-guarded)
 * @param flowManager provides access to the flow's process groups and root-level services; required
 * @param extensionManager used to resolve Controller Service extension types; required
 * @throws NullPointerException if a required collaborator is null (fail fast rather than NPE later)
 */
public StandardControllerServiceProvider(final ProcessScheduler scheduler, final BulletinRepository bulletinRepo, final FlowManager flowManager, final ExtensionManager extensionManager) {
    this.processScheduler = requireNonNull(scheduler, "scheduler");
    this.bulletinRepo = bulletinRepo;
    this.flowManager = requireNonNull(flowManager, "flowManager");
    this.extensionManager = requireNonNull(extensionManager, "extensionManager");
}
@Override
public void onControllerServiceAdded(final ControllerServiceNode serviceNode) {
    // Register the node in the lookup cache; an existing entry for the same id wins.
    final String serviceId = serviceNode.getIdentifier();
    serviceCache.putIfAbsent(serviceId, serviceNode);
}
@Override
public Set<ComponentNode> disableReferencingServices(final ControllerServiceNode serviceNode) {
    // Collect every Controller Service that (transitively) references this one, in the order
    // in which they must be disabled.
    final List<ControllerServiceNode> referencingServices = serviceNode.getReferences().findRecursiveReferences(ControllerServiceNode.class);
    final Set<ControllerServiceNode> ignorableReferences = new HashSet<>(referencingServices);

    // Verify every active service can be disabled before disabling any of them.
    final Set<ComponentNode> updated = new HashSet<>();
    for (final ControllerServiceNode referencingService : referencingServices) {
        if (!referencingService.isActive()) {
            continue;
        }
        referencingService.verifyCanDisable(ignorableReferences);
        updated.add(referencingService);
    }

    // Disable in reverse order so dependents are disabled before their dependencies.
    Collections.reverse(referencingServices);
    processScheduler.disableControllerServices(referencingServices);
    return updated;
}
@Override
public Set<ComponentNode> scheduleReferencingComponents(final ControllerServiceNode serviceNode) {
    // No candidate filter: schedule every referencing component via the default scheduler.
    final ComponentScheduler defaultScheduler = new DefaultComponentScheduler(this, VersionedComponentStateLookup.IDENTITY_LOOKUP);
    return scheduleReferencingComponents(serviceNode, null, defaultScheduler);
}
/**
 * Starts/enables every processor, reporting task, and flow analysis rule that references the
 * given Controller Service (directly, or transitively through other services).
 *
 * @param serviceNode the service whose referencing components should be scheduled
 * @param candidates optional filter; when non-null, only components in this set are considered
 * @param componentScheduler scheduler used to start processors and reporting tasks
 * @return the set of components that were actually started/enabled
 */
@Override
public Set<ComponentNode> scheduleReferencingComponents(final ControllerServiceNode serviceNode, final Set<ComponentNode> candidates, final ComponentScheduler componentScheduler) {
    // find all of the schedulable components (processors, reporting tasks) that refer to this Controller Service,
    // or a service that references this controller service, etc.
    final List<ProcessorNode> processors = serviceNode.getReferences().findRecursiveReferences(ProcessorNode.class);
    final List<ReportingTaskNode> reportingTasks = serviceNode.getReferences().findRecursiveReferences(ReportingTaskNode.class);
    final List<FlowAnalysisRuleNode> flowAnalysisRuleNodes = serviceNode.getReferences().findRecursiveReferences(FlowAnalysisRuleNode.class);

    // Phase 1: verify that all (non-disabled) components can start before starting any of them,
    // so a verification failure leaves nothing partially scheduled.
    for (final ProcessorNode node : processors) {
        if (isCandidate(candidates, node) && node.getScheduledState() != ScheduledState.DISABLED) {
            node.verifyCanStart();
        }
    }
    for (final ReportingTaskNode node : reportingTasks) {
        if (isCandidate(candidates, node) && node.getScheduledState() != ScheduledState.DISABLED) {
            node.verifyCanStart();
        }
    }
    for (final FlowAnalysisRuleNode node : flowAnalysisRuleNodes) {
        if (isCandidate(candidates, node) && !node.isEnabled()) {
            node.verifyCanEnable();
        }
    }

    // Phase 2: start/enable everything that passed verification.
    final Set<ComponentNode> updated = new HashSet<>();
    for (final ProcessorNode node : processors) {
        if (isCandidate(candidates, node) && node.getScheduledState() != ScheduledState.DISABLED) {
            componentScheduler.startComponent(node);
            updated.add(node);
        }
    }
    for (final ReportingTaskNode node : reportingTasks) {
        if (isCandidate(candidates, node) && node.getScheduledState() != ScheduledState.DISABLED) {
            componentScheduler.startReportingTask(node);
            updated.add(node);
        }
    }
    for (final FlowAnalysisRuleNode node : flowAnalysisRuleNodes) {
        if (isCandidate(candidates, node) && !node.isEnabled()) {
            node.enable();
            updated.add(node);
        }
    }
    return updated;
}

// True when the node should be considered: either no candidate filter was supplied, or the node
// is part of the candidate set. Extracted to replace six identical inline checks.
private static boolean isCandidate(final Set<ComponentNode> candidates, final ComponentNode node) {
    return candidates == null || candidates.contains(node);
}
/**
 * Stops/disables every processor, reporting task, and flow analysis rule that references the
 * given Controller Service (directly, or transitively through other services).
 *
 * @param serviceNode the service whose referencing components are to be unscheduled
 * @return a map of each affected component to a Future that completes when the component has
 *         finished stopping/disabling
 */
@Override
public Map<ComponentNode, Future<Void>> unscheduleReferencingComponents(final ControllerServiceNode serviceNode) {
    // find all of the schedulable components (processors, reporting tasks) that refer to this Controller Service,
    // or a service that references this controller service, etc.
    final List<ProcessorNode> processors = serviceNode.getReferences().findRecursiveReferences(ProcessorNode.class);
    final List<ReportingTaskNode> reportingTasks = serviceNode.getReferences().findRecursiveReferences(ReportingTaskNode.class);
    final List<FlowAnalysisRuleNode> flowAnalysisRuleNodes = serviceNode.getReferences().findRecursiveReferences(FlowAnalysisRuleNode.class);
    final Map<ComponentNode, Future<Void>> updated = new HashMap<>();
    // verify that we can stop all components (that are running) before doing anything,
    // so that a verification failure leaves nothing partially stopped
    for (final ProcessorNode node : processors) {
        if (node.getScheduledState() == ScheduledState.RUNNING) {
            node.verifyCanStop();
        }
    }
    for (final ReportingTaskNode node : reportingTasks) {
        if (node.getScheduledState() == ScheduledState.RUNNING) {
            node.verifyCanStop();
        }
    }
    for (final FlowAnalysisRuleNode node : flowAnalysisRuleNodes) {
        if (node.isEnabled()) {
            node.verifyCanDisable();
        }
    }
    // stop all of the components that are running
    for (final ProcessorNode node : processors) {
        if (node.getScheduledState() == ScheduledState.RUNNING) {
            final Future<Void> future = node.getProcessGroup().stopProcessor(node);
            updated.put(node, future);
        }
    }
    for (final ReportingTaskNode node : reportingTasks) {
        if (node.getScheduledState() == ScheduledState.RUNNING) {
            final Future<Void> future = processScheduler.unschedule(node);
            updated.put(node, future);
        }
    }
    for (final FlowAnalysisRuleNode node : flowAnalysisRuleNodes) {
        if (node.isEnabled()) {
            // Disabling a rule has no scheduler-provided future, so run it on a framework
            // thread and surface completion/failure through our own CompletableFuture.
            final CompletableFuture<Void> future = new CompletableFuture<>();
            processScheduler.submitFrameworkTask(() -> {
                try {
                    node.disable();
                    future.complete(null);
                } catch (final Exception e) {
                    future.completeExceptionally(e);
                }
            });
            updated.put(node, future);
        }
    }
    return updated;
}
@Override
public CompletableFuture<Void> enableControllerService(final ControllerServiceNode serviceNode) {
    // Already enabled or enabling: nothing to do.
    if (serviceNode.isActive()) {
        return CompletableFuture.completedFuture(null);
    }

    serviceNode.verifyCanEnable();
    serviceNode.reloadAdditionalResourcesIfNecessary();
    return processScheduler.enableControllerService(serviceNode);
}
@Override
public void enableControllerServices(final Collection<ControllerServiceNode> serviceNodesIn) {
    // Work on a defensive copy; drop any service whose required services cannot become available.
    final Collection<ControllerServiceNode> serviceNodes = new HashSet<>(serviceNodesIn);
    for (final ControllerServiceNode serviceNode : removeControllerServicesWithUnavailableRequirements(serviceNodes)) {
        try {
            // Enable the service (and anything it depends on), waiting up to 30 seconds.
            final Future<Void> enableFuture = enableControllerServiceAndDependencies(serviceNode);
            enableFuture.get(30, TimeUnit.SECONDS);
            logger.debug("Successfully enabled {}; service state = {}", serviceNode, serviceNode.getState());
        } catch (final ControllerServiceNotValidException csnve) {
            logger.warn("Failed to enable service {} because it is not currently valid", serviceNode);
        } catch (Exception e) {
            logger.error("Failed to enable {}", serviceNode, e);
            if (this.bulletinRepo != null) {
                this.bulletinRepo.addBulletin(BulletinFactory.createBulletin("Controller Service",
                    Severity.ERROR.name(), "Could not start " + serviceNode + " due to " + e));
            }
        }
    }
}
/**
 * Removes from the given collection any Controller Service whose required services are neither
 * already active nor part of the collection (and therefore cannot be enabled).
 *
 * <p>NOTE: the passed-in collection is mutated in place and is also the return value.
 *
 * @param serviceNodes candidate services to enable; mutated in place
 * @return the same collection, with un-startable services removed
 */
private Collection<ControllerServiceNode> removeControllerServicesWithUnavailableRequirements(final Collection<ControllerServiceNode> serviceNodes) {
    boolean recheckNeeded;
    // Fixed-point loop: removing one service may make another un-startable (if it required the
    // removed one), so keep re-scanning until a full pass removes nothing.
    do {
        recheckNeeded = false;
        for (Iterator<ControllerServiceNode> iter = serviceNodes.iterator(); iter.hasNext();) {
            boolean skipStarting = false;
            final ControllerServiceNode serviceNode = iter.next();
            final List<ControllerServiceNode> requiredServices = serviceNode.getRequiredControllerServices();
            for (ControllerServiceNode requiredService : requiredServices) {
                if (!requiredService.isActive() && !serviceNodes.contains(requiredService)) {
                    // Deliberately no break: log every unavailable requirement, not just the first.
                    skipStarting = true;
                    logger.error("Will not start {} because its required service {} is not active and is not part of the collection of things to start", serviceNode, requiredService);
                }
            }
            if (skipStarting) {
                // If any service was removed, then recheck all remaining services because the removed one might be required by another service in the list.
                recheckNeeded = true;
                iter.remove();
            }
        }
    } while (recheckNeeded);
    return serviceNodes;
}
@Override
public Future<Void> enableControllerServicesAsync(final Collection<ControllerServiceNode> serviceNodes) {
    // Run the potentially long enable sequence on a framework thread. The returned future
    // completes when every service has been processed, or exceptionally on failure.
    final CompletableFuture<Void> result = new CompletableFuture<>();
    processScheduler.submitFrameworkTask(() -> {
        try {
            enableControllerServices(serviceNodes, result);
            result.complete(null);
        } catch (final Exception e) {
            result.completeExceptionally(e);
        }
    });

    return result;
}
/**
 * Enables the given services one at a time, honoring cancellation of the supplied future.
 * Per-service failures are logged and reported as bulletins; the first failure is rethrown
 * (with later failures suppressed) only after all services have been attempted.
 *
 * @param serviceNodes the services to enable
 * @param completableFuture the caller's future; polled for cancellation so the operation can
 *        be abandoned promptly
 * @throws Exception the first per-service failure encountered, if any
 */
private void enableControllerServices(final Collection<ControllerServiceNode> serviceNodes, final CompletableFuture<Void> completableFuture) throws Exception {
    Exception firstFailure = null;
    // validate that we are able to start all of the services.
    for (final ControllerServiceNode controllerServiceNode : serviceNodes) {
        if (completableFuture.isCancelled()) {
            return;
        }
        try {
            // If service is already active, just move on to the next
            if (controllerServiceNode.isActive()) {
                continue;
            }
            final Future<Void> future = enableControllerServiceAndDependencies(controllerServiceNode);
            // Wait for the future to complete. But if the completableFuture ever is canceled, we want to stop waiting and return.
            // Polling in 1-second slices keeps cancellation responsive.
            while (true) {
                try {
                    future.get(1, TimeUnit.SECONDS);
                    logger.debug("Successfully enabled {}; service state = {}", controllerServiceNode, controllerServiceNode.getState());
                    break;
                } catch (final TimeoutException e) {
                    if (completableFuture.isCancelled()) {
                        return;
                    }
                } catch (final Exception e) {
                    logger.warn("Failed to enable service {}", controllerServiceNode, e);
                    // Remember the first failure; attach subsequent ones as suppressed exceptions.
                    if (firstFailure == null) {
                        firstFailure = e;
                    } else {
                        firstFailure.addSuppressed(e);
                    }
                    if (this.bulletinRepo != null) {
                        this.bulletinRepo.addBulletin(BulletinFactory.createBulletin("Controller Service",
                            Severity.ERROR.name(), "Could not enable " + controllerServiceNode + " due to " + e));
                    }
                    break;
                }
            }
        } catch (Exception e) {
            // Failure while initiating the enable (as opposed to while awaiting it above).
            logger.error("Failed to enable {}", controllerServiceNode, e);
            if (this.bulletinRepo != null) {
                this.bulletinRepo.addBulletin(BulletinFactory.createBulletin("Controller Service",
                    Severity.ERROR.name(), "Could not start " + controllerServiceNode + " due to " + e));
            }
        }
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}
/**
 * Enables the given service after first triggering the enablement of every service it requires
 * (recursively). Waits up to 30 seconds for each dependency to become enabled, but proceeds on
 * a best-effort basis even when a dependency has not finished enabling in time.
 *
 * @param serviceNode the service to enable
 * @return a Future that completes when the service itself has been enabled
 */
@Override
public Future<Void> enableControllerServiceAndDependencies(final ControllerServiceNode serviceNode) {
    if (serviceNode.isActive()) {
        logger.debug("Enabling of Controller Service {} triggered but service already enabled", serviceNode);
        return CompletableFuture.completedFuture(null);
    }
    // Kick off enablement of every inactive required service first (depth-first).
    final List<ControllerServiceNode> dependentServices = serviceNode.getRequiredControllerServices();
    for (final ControllerServiceNode depNode : dependentServices) {
        if (!depNode.isActive()) {
            logger.debug("Before enabling {}, will enable dependent Controller Service {}", serviceNode, depNode);
            enableControllerServiceAndDependencies(depNode);
        }
    }
    if (logger.isDebugEnabled()) {
        logger.debug("All dependent services for {} have now begun enabling. Will wait for them to complete", serviceNode);
    }
    // Best-effort wait: a dependency that is still enabling (or fails) does not abort the
    // attempt to enable this service.
    for (final ControllerServiceNode dependentService : dependentServices) {
        try {
            final boolean enabled = dependentService.awaitEnabled(30, TimeUnit.SECONDS);
            if (enabled) {
                logger.debug("Successfully enabled dependent service {}; service state = {}", dependentService, dependentService.getState());
            } else {
                logger.debug("After 30 seconds, {} is still not enabled. Will continue attempting to enable additional Controller Services", dependentService);
            }
        } catch (final Exception e) {
            logger.error("Failed to enable service {}, so may be unable to enable {}", dependentService, serviceNode, e);
            // Nothing we can really do. Will attempt to enable this service anyway.
        }
    }
    logger.debug("All dependent services have been enabled for {}; will now start service itself", serviceNode);
    return this.enableControllerService(serviceNode);
}
static List<List<ControllerServiceNode>> determineEnablingOrder(final Map<String, ControllerServiceNode> serviceNodeMap) {
    // For every service, build a dependency-ordered branch rooted at that service:
    // required services appear before the services that reference them.
    final List<List<ControllerServiceNode>> orderedNodeLists = new ArrayList<>();
    for (final ControllerServiceNode serviceNode : serviceNodeMap.values()) {
        final List<ControllerServiceNode> branch = new ArrayList<>();
        determineEnablingOrder(serviceNodeMap, serviceNode, branch, new HashSet<>());
        orderedNodeLists.add(branch);
    }

    return orderedNodeLists;
}
/**
 * Depth-first, post-order walk that appends {@code contextNode} to {@code orderedNodes} after
 * every service it references, so enabling in list order satisfies dependencies.
 *
 * @param serviceNodeMap services in scope, keyed by identifier
 * @param contextNode the service currently being visited
 * @param orderedNodes the dependency-ordered result list being built
 * @param visited cycle guard: nodes already entered on the current walk
 */
private static void determineEnablingOrder(final Map<String, ControllerServiceNode> serviceNodeMap, final ControllerServiceNode contextNode,
                                           final List<ControllerServiceNode> orderedNodes, final Set<ControllerServiceNode> visited) {
    if (visited.contains(contextNode)) {
        return;
    }
    for (final Map.Entry<PropertyDescriptor, String> entry : contextNode.getEffectivePropertyValues().entrySet()) {
        if (entry.getKey().getControllerServiceDefinition() != null) {
            final String referencedServiceId = entry.getValue();
            if (referencedServiceId != null) {
                final ControllerServiceNode referencedNode = serviceNodeMap.get(referencedServiceId);
                // Guard against ids that are not present in the map (e.g. a reference to a
                // service outside the given scope); previously a missing id caused recursion
                // with a null node and a NullPointerException at getEffectivePropertyValues().
                if (referencedNode != null && !orderedNodes.contains(referencedNode)) {
                    visited.add(contextNode);
                    determineEnablingOrder(serviceNodeMap, referencedNode, orderedNodes, visited);
                }
            }
        }
    }
    if (!orderedNodes.contains(contextNode)) {
        orderedNodes.add(contextNode);
    }
}
@Override
public CompletableFuture<Void> disableControllerService(final ControllerServiceNode serviceNode) {
    // Throws if the service is still referenced by running/enabled components.
    serviceNode.verifyCanDisable();

    return processScheduler.disableControllerService(serviceNode);
}
@Override
public CompletableFuture<Void> disableControllerServicesAsync(final Collection<ControllerServiceNode> serviceNodes) {
    // Perform the disable sequence on a framework thread; the returned future completes
    // (possibly exceptionally) when the whole batch has been processed.
    final CompletableFuture<Void> result = new CompletableFuture<>();
    processScheduler.submitFrameworkTask(() -> {
        try {
            disableControllerServices(serviceNodes, result);
            result.complete(null);
        } catch (final Exception e) {
            logger.error("Failed to disable Controller Services {}", serviceNodes, e);
            result.completeExceptionally(e);
        }
    });

    return result;
}
/**
 * Disables the given services (and their referencing services), waiting for services that are
 * already mid-transition to finish disabling. The first failure is rethrown (with later
 * failures suppressed) only after all services have been attempted.
 *
 * @param serviceNodes the services to disable
 * @param future the caller's future; used for cancellation checks and to surface verification errors
 * @throws Exception the first disable failure encountered, if any
 */
private void disableControllerServices(final Collection<ControllerServiceNode> serviceNodes, final CompletableFuture<Void> future) throws Exception {
    final Set<ControllerServiceNode> serviceNodeSet = new HashSet<>(serviceNodes);
    // Verify that for each Controller Service given, any service that references it is either disabled or is also in the given collection
    for (final ControllerServiceNode serviceNode : serviceNodes) {
        final List<ControllerServiceNode> references = serviceNode.getReferences().findRecursiveReferences(ControllerServiceNode.class);
        for (final ControllerServiceNode reference : references) {
            if (reference.isActive()) {
                try {
                    reference.verifyCanDisable(serviceNodeSet);
                } catch (final Exception e) {
                    // NOTE(review): this completes the caller's future exceptionally but does NOT
                    // return, so the disable loop below still runs — confirm this is intended.
                    future.completeExceptionally(e);
                }
            }
        }
    }
    Exception firstFailure = null;
    for (final ControllerServiceNode serviceNode : serviceNodes) {
        if (serviceNode.isActive()) {
            try {
                disableControllerServiceAndReferencingServices(serviceNode, future::isCancelled);
            } catch (final Exception e) {
                // Remember the first failure; attach subsequent ones as suppressed exceptions.
                if (firstFailure == null) {
                    firstFailure = e;
                } else {
                    firstFailure.addSuppressed(e);
                }
            }
        } else {
            // Service is not active (e.g. already DISABLING); wait for it to fully reach DISABLED.
            boolean disabled = false;
            while (!disabled) {
                try {
                    disabled = serviceNode.awaitDisabled(1, TimeUnit.SECONDS);
                } catch (final Exception e) {
                    // NOTE(review): if awaitDisabled keeps throwing, this loop never sets
                    // 'disabled' and spins indefinitely — confirm whether it should break/return.
                    logger.error("Failed to disable {}", serviceNode, e);
                    future.completeExceptionally(e);
                }
            }
        }
    }
    if (firstFailure != null) {
        throw firstFailure;
    }
}
private void disableControllerServiceAndReferencingServices(final ControllerServiceNode serviceNode, final BooleanSupplier cancelSupplier) throws ExecutionException, InterruptedException {
    // Referencing services must be disabled before the service itself.
    logger.debug("Disabling referencing services for {} before disabling service", serviceNode);
    disableReferencingServices(serviceNode);

    logger.debug("Disabling service {}", serviceNode);
    final CompletableFuture<?> serviceFuture = disableControllerService(serviceNode);

    // Poll in 1-second slices so that cancellation of the overall operation is honored promptly.
    boolean complete = false;
    while (!complete) {
        try {
            serviceFuture.get(1, TimeUnit.SECONDS);
            complete = true;
        } catch (final TimeoutException e) {
            if (cancelSupplier.getAsBoolean()) {
                return;
            }
        }
    }
    logger.debug("{} is now disabled", serviceNode);
}
@Override
public ControllerService getControllerService(final String serviceIdentifier) {
    // Resolve the node, then unwrap its proxied service implementation.
    final ControllerServiceNode serviceNode = getControllerServiceNode(serviceIdentifier);
    if (serviceNode == null) {
        return null;
    }
    return serviceNode.getProxiedControllerService();
}
// Convenience accessor for the flow's root Process Group.
private ProcessGroup getRootGroup() {
    return flowManager.getRootGroup();
}
/**
 * Resolves the Controller Service with the given identifier as visible to the component with
 * the given id. Group-scoped components (processors, services) see services defined in their
 * own group or any ancestor; controller-level components (reporting tasks, flow analysis rules,
 * parameter providers, flow registry clients) see only root-level services.
 *
 * @param serviceIdentifier id of the service to resolve
 * @param componentId id of the component on whose behalf the lookup is performed
 * @return the proxied service, or null if not found/visible
 * @throws IllegalStateException if no component with the given id exists
 */
@Override
public ControllerService getControllerServiceForComponent(final String serviceIdentifier, final String componentId) {
    // Find the Process Group that owns the component.
    ProcessGroup groupOfInterest;
    final ProcessorNode procNode = flowManager.getProcessorNode(componentId);
    if (procNode == null) {
        final ControllerServiceNode serviceNode = getControllerServiceNode(componentId);
        if (serviceNode == null) {
            // Not a processor or service: check each controller-level component type in turn,
            // purely to validate that the component id exists at all.
            final ReportingTaskNode taskNode = flowManager.getReportingTaskNode(componentId);
            if (taskNode == null) {
                final FlowAnalysisRuleNode flowAnalysisRuleNode = flowManager.getFlowAnalysisRuleNode(componentId);
                if (flowAnalysisRuleNode == null) {
                    final ParameterProviderNode parameterProviderNode = flowManager.getParameterProvider(componentId);
                    if (parameterProviderNode == null) {
                        final FlowRegistryClientNode flowRegistryClientNode = flowManager.getFlowRegistryClient(componentId);
                        if (flowRegistryClientNode == null) {
                            throw new IllegalStateException("Could not find any Processor, Reporting Task, Parameter Provider, or Controller Service with identifier " + componentId);
                        }
                    }
                }
            }
            // We have confirmed that the component is a reporting task or a flow analysis rule or parameter provider. We can only reference Controller Services
            // that are scoped at the FlowController level in this case.
            final ControllerServiceNode rootServiceNode = flowManager.getRootControllerService(serviceIdentifier);
            return (rootServiceNode == null) ? null : rootServiceNode.getProxiedControllerService();
        } else {
            groupOfInterest = serviceNode.getProcessGroup();
        }
    } else {
        groupOfInterest = procNode.getProcessGroup();
    }
    // A null group means the component itself is root/controller-scoped.
    if (groupOfInterest == null) {
        final ControllerServiceNode rootServiceNode = flowManager.getRootControllerService(serviceIdentifier);
        return (rootServiceNode == null) ? null : rootServiceNode.getProxiedControllerService();
    }
    // Search the component's group and its ancestors (external references excluded).
    final ControllerServiceNode serviceNode = groupOfInterest.findControllerService(serviceIdentifier, false, true);
    if (serviceNode != null) {
        return serviceNode.getProxiedControllerService();
    }
    return null;
}
@Override
public boolean isControllerServiceEnabled(final ControllerService service) {
    // Delegate to the identifier-based lookup.
    final String identifier = service.getIdentifier();
    return isControllerServiceEnabled(identifier);
}
@Override
public boolean isControllerServiceEnabled(final String serviceIdentifier) {
    // True only when the node exists and has fully reached the ENABLED state.
    final ControllerServiceNode serviceNode = getControllerServiceNode(serviceIdentifier);
    if (serviceNode == null) {
        return false;
    }
    return ControllerServiceState.ENABLED == serviceNode.getState();
}
@Override
public boolean isControllerServiceEnabling(final String serviceIdentifier) {
    // True only when the node exists and is currently transitioning to ENABLED.
    final ControllerServiceNode serviceNode = getControllerServiceNode(serviceIdentifier);
    if (serviceNode == null) {
        return false;
    }
    return ControllerServiceState.ENABLING == serviceNode.getState();
}
@Override
public ControllerServiceNode getControllerServiceNode(final String serviceIdentifier) {
    // Root-level (controller-scoped) services take precedence over cached group-scoped ones.
    final ControllerServiceNode rootServiceNode = flowManager.getRootControllerService(serviceIdentifier);
    return rootServiceNode != null ? rootServiceNode : serviceCache.get(serviceIdentifier);
}
@Override
public Set<String> getControllerServiceIdentifiers(final Class<? extends ControllerService> serviceType, final String groupId) {
    // A null groupId means controller-scoped (root) services. Otherwise resolve the group,
    // honoring the "root" alias, and include services inherited from ancestor groups.
    final Set<ControllerServiceNode> serviceNodes;
    if (groupId == null) {
        serviceNodes = flowManager.getRootControllerServices();
    } else {
        ProcessGroup group = getRootGroup();
        if (!FlowManager.ROOT_GROUP_ID_ALIAS.equals(groupId) && !group.getIdentifier().equals(groupId)) {
            group = group.findProcessGroup(groupId);
        }
        if (group == null) {
            return Collections.emptySet();
        }
        serviceNodes = group.getControllerServices(true);
    }

    // Keep only services whose proxied implementation is assignable to the requested type.
    final Set<String> identifiers = new HashSet<>();
    for (final ControllerServiceNode serviceNode : serviceNodes) {
        if (serviceType.isAssignableFrom(serviceNode.getProxiedControllerService().getClass())) {
            identifiers.add(serviceNode.getIdentifier());
        }
    }
    return identifiers;
}
/**
 * Resolves a ControllerService interface class from its simple or canonical name by
 * scanning all registered ControllerService extensions.
 *
 * @param serviceTypeName the simple or canonical name of the service interface
 * @return the matching interface class, or null if no extension implements it
 */
@Override
public Class<? extends ControllerService> getControllerServiceType(final String serviceTypeName) {
    for (final ExtensionDefinition definition : extensionManager.getExtensions(ControllerService.class)) {
        final Class<?> implementationClass = extensionManager.getClass(definition);
        final Class<? extends ControllerService> match = getServiceInterfaceByName(implementationClass, serviceTypeName);
        if (match != null) {
            return match;
        }
    }
    return null;
}
/**
 * Finds, among the interfaces directly implemented by the given class, the
 * ControllerService sub-interface whose simple or canonical name matches the given name.
 *
 * @param serviceClass the concrete service implementation class to inspect
 * @param type the simple or canonical name of the desired interface
 * @return the matching service interface, or null if none matches
 */
@SuppressWarnings("unchecked") // safe: the isAssignableFrom guard above proves the cast
private Class<? extends ControllerService> getServiceInterfaceByName(final Class<?> serviceClass, final String type) {
    for (final Class<?> serviceInterface : serviceClass.getInterfaces()) {
        // Only interfaces that extend ControllerService are candidates.
        if (!ControllerService.class.isAssignableFrom(serviceInterface)) {
            continue;
        }
        // Note: the original re-checked isAssignableFrom here; that check was redundant
        // because the guard above already filtered out non-service interfaces.
        if (type.equals(serviceInterface.getSimpleName()) || type.equals(serviceInterface.getCanonicalName())) {
            return (Class<? extends ControllerService>) serviceInterface;
        }
    }
    return null;
}
/**
 * Looks up the display name of a Controller Service by its identifier.
 *
 * @param serviceIdentifier the identifier of the service
 * @return the service's name, or null if no node exists for the identifier
 */
@Override
public String getControllerServiceName(final String serviceIdentifier) {
    final ControllerServiceNode serviceNode = getControllerServiceNode(serviceIdentifier);
    if (serviceNode == null) {
        return null;
    }
    return serviceNode.getName();
}
@Override
public void removeControllerService(final ControllerServiceNode serviceNode) {
requireNonNull(serviceNode);
// A null Process Group means the service is scoped at the controller (root) level.
final ProcessGroup group = serviceNode.getProcessGroup();
if (group == null) {
flowManager.removeRootControllerService(serviceNode);
} else {
group.removeControllerService(serviceNode);
}
// After detaching the node, tear down per-instance resources in order:
// log repository, instance class loader, service cache entry, rule violations.
LogRepositoryFactory.removeRepository(serviceNode.getIdentifier());
extensionManager.removeInstanceClassLoader(serviceNode.getIdentifier());
serviceCache.remove(serviceNode.getIdentifier());
flowManager.getRuleViolationsManager().ifPresent(
ruleViolationsManager -> ruleViolationsManager.removeRuleViolationsForSubject(serviceNode.getIdentifier())
);
}
/**
 * Returns every cached Controller Service that belongs to a Process Group,
 * i.e. excluding controller-level (root) services.
 *
 * @return the set of group-scoped service nodes
 */
@Override
public Collection<ControllerServiceNode> getNonRootControllerServices() {
    final Set<ControllerServiceNode> nonRootServices = new HashSet<>();
    for (final ControllerServiceNode serviceNode : serviceCache.values()) {
        // Root-level services have no Process Group; skip them.
        if (serviceNode.getProcessGroup() != null) {
            nonRootServices.add(serviceNode);
        }
    }
    return nonRootServices;
}
/**
 * Enables every Controller Service that (transitively) references the given service.
 *
 * @param serviceNode the service whose referencing services should be enabled
 * @return the set of components that were updated (enabled)
 */
@Override
public Set<ComponentNode> enableReferencingServices(final ControllerServiceNode serviceNode) {
    final List<ControllerServiceNode> referencingServices =
        serviceNode.getReferences().findRecursiveReferences(ControllerServiceNode.class);
    logger.debug("Enabling the following Referencing Services for {}: {}", serviceNode, referencingServices);
    return enableReferencingServices(serviceNode, referencingServices);
}
// Enables the given recursive references of serviceNode in two phases:
// first verify that every inactive reference can be enabled (accumulating the set of
// services that will have been enabled by the time each one is reached), and only then
// actually enable them. Returns the set of nodes that were enabled.
private Set<ComponentNode> enableReferencingServices(final ControllerServiceNode serviceNode, final List<ControllerServiceNode> recursiveReferences) {
if (!serviceNode.isActive()) {
serviceNode.verifyCanEnable(new HashSet<>(recursiveReferences));
}
final Set<ComponentNode> updated = new HashSet<>();
// Phase 1: dry-run verification. "ifEnabled" grows as we go so each node is verified
// against the services that would already be enabled before it.
final Set<ControllerServiceNode> ifEnabled = new HashSet<>();
for (final ControllerServiceNode nodeToEnable : recursiveReferences) {
if (!nodeToEnable.isActive()) {
nodeToEnable.verifyCanEnable(ifEnabled);
ifEnabled.add(nodeToEnable);
}
}
// Phase 2: perform the actual enabling, now that all verifications passed.
for (final ControllerServiceNode nodeToEnable : recursiveReferences) {
if (!nodeToEnable.isActive()) {
logger.debug("Enabling {} because it references {}", nodeToEnable, serviceNode);
enableControllerService(nodeToEnable);
updated.add(nodeToEnable);
}
}
return updated;
}
/**
 * Verifies that every Controller Service that (transitively) references the given
 * service could be enabled, without actually enabling anything.
 *
 * @param serviceNode the service whose referencing services are verified
 */
@Override
public void verifyCanEnableReferencingServices(final ControllerServiceNode serviceNode) {
    final List<ControllerServiceNode> references =
        serviceNode.getReferences().findRecursiveReferences(ControllerServiceNode.class);
    final Set<ControllerServiceNode> referenceSet = new HashSet<>(references);
    for (final ControllerServiceNode reference : references) {
        reference.verifyCanEnable(referenceSet);
    }
}
/**
 * Verifies that every component that (transitively) references the given service
 * could be scheduled/enabled: reporting tasks and processors that are not DISABLED
 * must be startable, and flow analysis rules must be enableable.
 *
 * @param serviceNode the service whose referencing components are verified
 */
@Override
public void verifyCanScheduleReferencingComponents(final ControllerServiceNode serviceNode) {
    // Gather every referencing component, grouped by component type.
    final List<ControllerServiceNode> services =
        serviceNode.getReferences().findRecursiveReferences(ControllerServiceNode.class);
    final List<ReportingTaskNode> reportingTasks =
        serviceNode.getReferences().findRecursiveReferences(ReportingTaskNode.class);
    final List<FlowAnalysisRuleNode> flowAnalysisRules =
        serviceNode.getReferences().findRecursiveReferences(FlowAnalysisRuleNode.class);
    final List<ProcessorNode> processors =
        serviceNode.getReferences().findRecursiveReferences(ProcessorNode.class);
    final Set<ControllerServiceNode> serviceSet = new HashSet<>(services);
    for (final ReportingTaskNode reportingTask : reportingTasks) {
        if (reportingTask.getScheduledState() != ScheduledState.DISABLED) {
            reportingTask.verifyCanStart(serviceSet);
        }
    }
    for (final FlowAnalysisRuleNode flowAnalysisRule : flowAnalysisRules) {
        flowAnalysisRule.verifyCanEnable(serviceSet);
    }
    for (final ProcessorNode processor : processors) {
        if (processor.getScheduledState() != ScheduledState.DISABLED) {
            processor.verifyCanStart();
        }
    }
}
/**
 * Verifies that every active Controller Service that (transitively) references the
 * given service could be disabled, without actually disabling anything.
 *
 * @param serviceNode the service whose referencing services are verified
 */
@Override
public void verifyCanDisableReferencingServices(final ControllerServiceNode serviceNode) {
    // All Controller Services that would have to be disabled, in disable order.
    final List<ControllerServiceNode> servicesToDisable =
        serviceNode.getReferences().findRecursiveReferences(ControllerServiceNode.class);
    final Set<ControllerServiceNode> disableSet = new HashSet<>(servicesToDisable);
    for (final ControllerServiceNode candidate : servicesToDisable) {
        if (candidate.isActive()) {
            candidate.verifyCanDisable(disableSet);
        }
    }
}
/**
 * Verifies that referencing components can be stopped. Stopping is always permitted,
 * so this is intentionally a no-op.
 *
 * @param serviceNode the service whose referencing components would be stopped
 */
@Override
public void verifyCanStopReferencingComponents(final ControllerServiceNode serviceNode) {
    // Intentionally empty: referencing components can always be stopped.
}
/**
 * Unsupported overload: a Process Group identifier is required to scope the lookup.
 *
 * @param serviceType the service interface that candidates must implement
 * @throws UnsupportedOperationException always
 */
@Override
public Set<String> getControllerServiceIdentifiers(final Class<? extends ControllerService> serviceType) throws IllegalArgumentException {
    final String message = "Cannot obtain Controller Service Identifiers for service type " + serviceType + " without providing a Process Group Identifier";
    throw new UnsupportedOperationException(message);
}
/**
 * Returns the ExtensionManager used to resolve extension classes and
 * per-instance class loaders.
 */
@Override
public ExtensionManager getExtensionManager() {
    return extensionManager;
}
}
/*
* Copyright (C) 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.copybara.git;
import static com.google.common.truth.Truth.assertThat;
import static com.google.copybara.testing.git.GitTestUtil.ALWAYS_TRUE;
import static com.google.copybara.testing.git.GitTestUtil.mockGitHubNotFound;
import static com.google.copybara.testing.git.GitTestUtil.mockGitHubUnauthorized;
import static com.google.copybara.testing.git.GitTestUtil.mockResponse;
import static com.google.copybara.testing.git.GitTestUtil.mockResponseAndValidateRequest;
import static com.google.copybara.testing.git.GitTestUtil.mockResponseWithStatus;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.contains;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import com.google.api.client.json.gson.GsonFactory;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.jimfs.Jimfs;
import com.google.copybara.ActionMigration;
import com.google.copybara.EndpointProvider;
import com.google.copybara.exception.ValidationException;
import com.google.copybara.testing.DummyChecker;
import com.google.copybara.testing.DummyTrigger;
import com.google.copybara.testing.OptionsBuilder;
import com.google.copybara.testing.SkylarkTestExecutor;
import com.google.copybara.testing.git.GitTestUtil;
import com.google.copybara.testing.git.GitTestUtil.MockRequestAssertion;
import com.google.copybara.util.console.Message.MessageType;
import com.google.copybara.util.console.testing.TestingConsole;
import java.io.IOException;
import java.io.Serializable;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import net.starlark.java.eval.Starlark;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
@RunWith(JUnit4.class)
public class GitHubEndpointTest {
// Executes Starlark config snippets under test.
private SkylarkTestExecutor skylark;
// Captures console output so tests can assert on logged messages.
private TestingConsole console;
// Fake trigger used as the origin of core.feedback migrations.
private DummyTrigger dummyTrigger;
// In-memory working directory (Jimfs) for migration runs.
private Path workdir;
// Mocks the GitHub REST API transport.
private GitTestUtil gitUtil;
// Configures the shared fixtures: an in-memory workdir, testing console/options,
// a mocked GitHub API transport with canned responses for the endpoints most tests hit,
// and a credentials file so the endpoint can authenticate.
@Before
public void setup() throws Exception {
workdir = Jimfs.newFileSystem().getPath("/");
console = new TestingConsole();
OptionsBuilder options = new OptionsBuilder();
options.setConsole(console)
.setOutputRootToTmpDir();
gitUtil = new GitTestUtil(options);
gitUtil.mockRemoteGitRepos();
dummyTrigger = new DummyTrigger();
options.testingOptions.feedbackTrigger = dummyTrigger;
options.testingOptions.checker = new DummyChecker(ImmutableSet.of("badword"));
// Canned combined-status response for refs/heads/master.
gitUtil.mockApi(eq("GET"), contains("master/status"),
mockResponse("{\n"
+ " \"state\": \"failure\",\n"
+ " \"total_count\": 2,\n"
+ " \"sha\": \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\n"
+ " \"statuses\" : [\n"
+ " { \"state\": \"failure\", \"context\": \"some/context\"},\n"
+ " { \"state\": \"success\", \"context\": \"other/context\"}\n"
+ " ]\n"
+ "}"));
// Canned commit lookup for the all-'a' SHA.
gitUtil.mockApi(eq("GET"), contains("/commits/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
mockResponse("{\n"
+ " \"sha\" : \"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa\",\n"
+ " \"commit\" : {\n"
+ " \"author\": { \"name\": \"theauthor\", \"email\": \"author@example.com\"},\n"
+ " \"committer\": { \"name\": \"thecommitter\", "
+ "\"email\": \"committer@example.com\"},\n"
+ " \"message\": \"This is a message\\n\\nWith body\\n\"\n"
+ " },\n"
+ " \"committer\": { \"login\" : \"github_committer\"},\n"
+ " \"author\": { \"login\" : \"github_author\"}\n"
+ "}"));
// Canned create-status response for SHAs starting with e59774.
gitUtil.mockApi(eq("POST"), contains("/statuses/e59774"),
mockResponse("{\n"
+ " \"state\": \"success\",\n"
+ " \"target_url\": \"https://github.com/google/example\",\n"
+ " \"description\": \"Observed foo\",\n"
+ " \"context\": \"test\"\n"
+ "}"));
// Canned single-ref response for refs/heads/test (any HTTP method).
gitUtil.mockApi(
anyString(),
contains("/git/refs/heads/test"),
mockResponse(
"{\n"
+ " \"ref\": \"refs/heads/test\",\n"
+ " \"url\": \"https://github.com/google/example/git/refs/heads/test\",\n"
+ " \"object\": { \n"
+ " \"type\": \"commit\",\n"
+ " \"sha\": \"e597746de9c1704e648ddc3ffa0d2096b146d600\", \n"
+ " \"url\":"
+ " \"https://github.com/google/example/git/commits/e597746de9c1704e648ddc3ffa0d2096b146d600\"\n"
+ " } \n"
+ "}"));
// Canned list-refs response.
gitUtil.mockApi(
eq("GET"),
contains("git/refs?per_page=100"),
mockResponse(
"[{\n"
+ " \"ref\": \"refs/heads/test\",\n"
+ " \"url\": \"https://github.com/google/example/git/refs/heads/test\",\n"
+ " \"object\": { \n"
+ " \"type\": \"commit\",\n"
+ " \"sha\": \"e597746de9c1704e648ddc3ffa0d2096b146d600\", \n"
+ " \"url\":"
+ " \"https://github.com/google/example/git/commits/e597746de9c1704e648ddc3ffa0d2096b146d600\"\n"
+ " } \n"
+ "}]"));
// Canned check-runs response for the ...d610 SHA.
gitUtil.mockApi(
eq("GET"),
contains("commits/e597746de9c1704e648ddc3ffa0d2096b146d610/check-runs"),
mockResponse(
"{\n"
+ " \"total_count\": 1,\n"
+ " \"check_runs\": [\n"
+ " {\n"
+ " \"id\": 4,\n"
+ " \"details_url\": \"https://example.com\",\n"
+ " \"status\": \"completed\",\n"
+ " \"conclusion\": \"neutral\",\n"
+ " \"name\": \"mighty_readme\",\n"
+ " \"output\": {\n"
+ " \"title\": \"Mighty Readme report\",\n"
+ " \"summary\": \"test_summary\",\n"
+ " \"text\": \"test_text\"\n"
+ " },\n"
+ " \"app\": {\n"
+ " \"id\": 1,\n"
+ " \"slug\": \"octoapp\",\n"
+ " \"name\": \"Octocat App\"\n"
+ " }\n"
+ " }\n"
+ " ]\n"
+ "}"
));
// A git credential store file providing a token for github.com.
Path credentialsFile = Files.createTempFile("credentials", "test");
Files.write(credentialsFile, "https://user:SECRET@github.com".getBytes(UTF_8));
options.git.credentialHelperStorePath = credentialsFile.toString();
skylark = new SkylarkTestExecutor(options);
}
/** Verifies that a minimal git.github_api(...) definition parses successfully. */
@Test
public void testParsing() throws Exception {
  final String config = "e = git.github_api(url = 'https://github.com/google/example')";
  skylark.eval("e", config);
}
// Verifies that git.github_api(...) also parses when a checker is supplied.
@Test
public void testParsingWithChecker() throws Exception {
skylark.eval(
"e",
"e = git.github_api(\n"
+ "url = 'https://github.com/google/example', \n"
+ "checker = testing.dummy_checker(),\n"
+ ")\n");
}
// Verifies that the configured checker rejects API payloads containing forbidden words:
// updating a reference to a branch name containing 'badword' must fail validation.
@Test
public void testCheckerIsHonored() throws Exception {
String config =
""
+ "def test_action(ctx):\n"
+ " ctx.destination.update_reference(\n"
+ " 'e597746de9c1704e648ddc3ffa0d2096b146d600', 'foo_badword_bar', True)\n"
+ " return ctx.success()\n"
+ "\n"
+ "core.feedback(\n"
+ " name = 'default',\n"
+ " origin = testing.dummy_trigger(),\n"
+ " destination = git.github_api("
+ " url = 'https://github.com/google/example',\n"
+ " checker = testing.dummy_checker(),\n"
+ " ),\n"
+ " actions = [test_action,],\n"
+ ")\n"
+ "\n";
ActionMigration actionMigration = (ActionMigration) skylark.loadConfig(config)
.getMigration("default");
assertThat(actionMigration.getDestinationDescription().get("url"))
.containsExactly("https://github.com/google/example");
ValidationException expected = assertThrows(
ValidationException.class,
() -> actionMigration.run(workdir, ImmutableList.of("12345")));
assertThat(expected)
.hasMessageThat()
.contains("Bad word 'badword' found: field 'path'.");
}
/** An empty url must be rejected at parse time. */
@Test
public void testParsingEmptyUrl() {
  final String expectedError = "Invalid empty field 'url'";
  skylark.evalFails("git.github_api(url = '')", expectedError);
}
// Verifies that new_origin_ref wraps the given string in a ref whose 'ref' field matches.
@Test
public void testOriginRef() throws Exception {
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.new_origin_ref('12345')")
.addAll(checkFieldStarLark("res", "ref", "'12345'"))
.build());
}
/**
 * A test that uses feedback.
 *
 * <p>Does not verify all the fields, see {@link #testCreateStatusExhaustive()} for that.
 */
@Test
public void testFeedbackCreateStatus() throws Exception{
dummyTrigger.addAll("Foo", "Bar");
ActionMigration actionMigration =
feedback(
""
+ "def test_action(ctx):\n"
+ " ref = 'None'\n"
+ " if len(ctx.refs) > 0:\n"
+ " ref = ctx.refs[0]\n"
+ " \n"
+ " for m in ctx.origin.get_messages:\n"
+ " status = ctx.destination.create_status(\n"
+ " sha = ref,\n"
+ " state = 'success',\n"
+ " context = 'test',\n"
+ " description = 'Observed ' + m,\n"
+ " )\n"
+ " ctx.console.info('Created status')\n"
+ " return ctx.success()\n"
+ "\n");
// Each POST must carry the next expected description, cycling through the trigger messages.
Iterator<String> createValues = ImmutableList.of("Observed Foo", "Observed Bar").iterator();
gitUtil.mockApi(eq("POST"), contains("/status"),
mockResponseAndValidateRequest("{\n"
+ " \"state\" : \"success\",\n"
+ " \"target_url\" : \"https://github.com/google/example\",\n"
+ " \"description\" : \"Observed foo\",\n"
+ " \"context\" : \"test\"\n"
+ "}",
new MockRequestAssertion(String.format(
"Requests were expected to cycle through the values of %s", createValues),
r -> r.contains(createValues.next()))));
actionMigration.run(workdir, ImmutableList.of("e597746de9c1704e648ddc3ffa0d2096b146d600"));
// One status per trigger message ("Foo" and "Bar") -> two POSTs and two log lines.
console.assertThat().timesInLog(2, MessageType.INFO, "Created status");
verify(gitUtil.httpTransport(), times(2)).buildRequest(eq("POST"), contains("/status"));
}
// Verifies every field of the object returned by create_status.
@Test
public void testCreateStatusExhaustive() throws Exception {
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.create_status(sha = 'e597746de9c1704e648ddc3ffa0d2096b146d600',"
+ " state = 'success', context = 'test', description = 'Observed foo')")
.addAll(checkFieldStarLark("res", "state", "'success'"))
.addAll(checkFieldStarLark("res", "target_url", "'https://github.com/google/example'"))
.addAll(checkFieldStarLark("res", "description", "'Observed foo'"))
.addAll(checkFieldStarLark("res", "context", "'test'"))
.build());
}
// A 422 from GitHub (too many statuses for a SHA/context) must surface as a
// ValidationException carrying GitHub's message.
@Test
public void testCreateStatusLimitReached() throws Exception {
gitUtil.mockApi(eq("POST"), contains("/statuses/c59774"),
mockResponseWithStatus(
"{\n"
+ "\"message\" : \"This SHA and context has reached the maximum number of statuses\",\n"
+ "\"documentation_url\" : \"https://developer.github.com/v3\"\n"
+ "}", 422, ALWAYS_TRUE));
ValidationException expected =
assertThrows(ValidationException.class, () -> runFeedback(ImmutableList.<String>builder()
.add("ctx.destination.create_status("
+ "sha = 'c597746de9c1704e648ddc3ffa0d2096b146d600',"
+ " state = 'success', context = 'test', description = 'Observed foo')").build()));
assertThat(expected).hasMessageThat()
.contains("This SHA and context has reached the maximum number of statuses");
}
// Verifies get_combined_status against the canned master/status response from setup().
@Test
public void testGetCombinedStatus() throws Exception {
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_combined_status(ref = 'master')")
.addAll(checkFieldStarLark("res", "state", "'failure'"))
.addAll(checkFieldStarLark("res", "total_count", "2"))
.addAll(checkFieldStarLark("res", "statuses[0].context", "'some/context'"))
.addAll(checkFieldStarLark("res", "statuses[0].state", "'failure'"))
.addAll(checkFieldStarLark("res", "statuses[1].context", "'other/context'"))
.addAll(checkFieldStarLark("res", "statuses[1].state", "'success'"))
.build());
}
// Verifies get_check_runs against the canned check-runs response from setup().
@Test
public void testGetCheckRuns() throws Exception {
runFeedback(
ImmutableList.<String>builder()
.add(
"res ="
+ " ctx.destination.get_check_runs(sha='e597746de9c1704e648ddc3ffa0d2096b146d610')[0]")
.addAll(checkFieldStarLark("res", "detail_url", "'https://example.com'"))
.addAll(checkFieldStarLark("res", "status", "'completed'"))
.addAll(checkFieldStarLark("res", "conclusion", "'neutral'"))
.addAll(checkFieldStarLark("res", "name", "'mighty_readme'"))
.addAll(checkFieldStarLark("res", "app.id", "1"))
.addAll(checkFieldStarLark("res", "app.slug", "'octoapp'"))
.addAll(checkFieldStarLark("res", "app.name", "'Octocat App'"))
.addAll(checkFieldStarLark("res", "output.title", "'Mighty Readme report'"))
.addAll(checkFieldStarLark("res", "output.summary", "'test_summary'"))
.addAll(checkFieldStarLark("res", "output.text", "'test_text'"))
.build());
}
// A 404 for the combined status must map to None rather than an error.
@Test
public void testGetCombinedStatus_notFound() throws Exception {
gitUtil.mockApi(
eq("GET"),
eq("https://api.github.com/repos/google/example/commits/heads/not_found/"
+ "status?per_page=100"),
mockGitHubNotFound());
runFeedback(ImmutableList.<String>builder()
.add("res = {}")
.add("res['foo'] = ctx.destination.get_combined_status(ref = 'heads/not_found')")
.addAll(checkFieldStarLark("res", "get('foo')", "None"))
.build());
}
// Verifies that get_pull_request_comment exposes id, path, body and diff_hunk.
@Test
public void testGetPullRequestComment() throws Exception {
gitUtil.mockApi(
eq("GET"),
eq("https://api.github.com/repos/google/example/pulls/comments/12345"),
mockResponse(toJson(jsonComment())));
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_pull_request_comment(comment_id = '12345')")
.addAll(checkFieldStarLark("res", "id", "'12345'"))
.addAll(checkFieldStarLark("res", "path", "'foo/Bar.java'"))
.addAll(checkFieldStarLark("res", "body", "'This needs to be fixed.'"))
.addAll(checkFieldStarLark("res", "diff_hunk", "'@@ -36,11 +35,16 @@ foo bar'"))
.build());
}
/** Builds the map serialized as the mocked JSON payload of a pull-request review comment. */
private static ImmutableMap<String, ? extends Serializable> jsonComment() {
  return ImmutableMap.<String, Serializable>builder()
      .put("id", 12345)
      .put("path", "foo/Bar.java")
      .put("body", "This needs to be fixed.")
      .put("diff_hunk", "@@ -36,11 +35,16 @@ foo bar")
      .build();
}
// A 404 when fetching a single PR comment must raise a ValidationException.
@Test
public void testGetPullRequestComment_notFound() {
gitUtil.mockApi(
eq("GET"),
eq("https://api.github.com/repos/google/example/pulls/comments/12345"),
mockGitHubNotFound());
ValidationException expected = assertThrows(ValidationException.class, () -> runFeedback(
ImmutableList.of("ctx.destination.get_pull_request_comment(comment_id = '12345')")));
assertThat(expected).hasMessageThat().contains("Pull Request Comment not found");
}
// A non-numeric comment id must be rejected before any API call is made.
@Test
public void testGetPullRequestComment_invalidId() {
ValidationException expected = assertThrows(ValidationException.class, () -> runFeedback(
ImmutableList.of("ctx.destination.get_pull_request_comment(comment_id = 'foo')")));
assertThat(expected).hasMessageThat().contains("Invalid comment id foo");
}
// Verifies that get_pull_request_comments returns the full list of comments.
@Test
public void testGetPullRequestComments() throws Exception {
gitUtil.mockApi(
eq("GET"),
eq("https://api.github.com/repos/google/example/pulls/12345/comments?per_page=100"),
mockResponse(toJson(ImmutableList.of(jsonComment(), jsonComment()))));
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_pull_request_comments(number = 12345)")
.addAll(checkFieldStarLark("res[0]", "id", "'12345'"))
.addAll(checkFieldStarLark("res[0]", "path", "'foo/Bar.java'"))
.addAll(checkFieldStarLark("res[0]", "body", "'This needs to be fixed.'"))
.addAll(checkFieldStarLark("res[0]", "diff_hunk", "'@@ -36,11 +35,16 @@ foo bar'"))
.addAll(checkFieldStarLark("res[1]", "id", "'12345'"))
.build());
}
// A 404 when listing PR comments must raise a ValidationException.
@Test
public void testGetPullRequestComments_notFound() {
gitUtil.mockApi(
eq("GET"),
eq("https://api.github.com/repos/google/example/pulls/12345/comments?per_page=100"),
mockGitHubNotFound());
ValidationException expected = assertThrows(ValidationException.class, () ->
runFeedback(ImmutableList.of("ctx.destination.get_pull_request_comments(number = 12345)")));
assertThat(expected).hasMessageThat().contains("Pull Request Comments not found");
}
// Verifies get_commit against the canned commit response from setup(), including
// both git-level author/committer and the GitHub login fields.
@Test
public void testGetCommit() throws Exception {
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_commit(ref = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')")
.addAll(checkFieldStarLark("res", "sha", "'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'"))
.addAll(checkFieldStarLark("res", "commit.author.name", "'theauthor'"))
.addAll(checkFieldStarLark("res", "commit.author.email", "'author@example.com'"))
.addAll(checkFieldStarLark("res", "commit.committer.name", "'thecommitter'"))
.addAll(checkFieldStarLark("res", "commit.committer.email", "'committer@example.com'"))
.addAll(checkFieldStarLark("res", "commit.message",
"'This is a message\\n\\nWith body\\n'"))
.addAll(checkFieldStarLark("res", "author.login", "'github_author'"))
.addAll(checkFieldStarLark("res", "committer.login", "'github_committer'"))
.build());
}
// A 404 when fetching a commit must map to None rather than an error.
@Test
public void testGetCommitNotFound() throws Exception {
gitUtil.mockApi(eq("GET"), eq("https://api.github.com/repos/google/example/commits/"
+ "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"),
mockGitHubNotFound());
runFeedback(ImmutableList.<String>builder()
.add("res = {}")
.add("res['foo'] = ctx.destination.get_commit("
+ "ref = 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb')")
.addAll(checkFieldStarLark("res", "get('foo')", "None"))
.build());
}
// A 404 when fetching a git ref must map to None rather than an error.
@Test
public void testGetReferenceNotFound() throws Exception {
gitUtil.mockApi(eq("GET"),
eq("https://api.github.com/repos/google/example/git/refs/heads/not_found"),
mockGitHubNotFound());
runFeedback(ImmutableList.<String>builder()
.add("res = {}")
.add("res['foo'] = ctx.destination.get_reference(ref = 'refs/heads/not_found')")
.addAll(checkFieldStarLark("res", "get('foo')", "None"))
.build());
}
/**
 * A test that uses update_reference.
 *
 * <p>Uses a fully-qualified ref name; the short-name variant is covered by
 * {@link #testFeedbackUpdateReferenceShortRef()}.
 */
@Test
public void testFeedbackUpdateReference() throws Exception{
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.update_reference('e597746de9c1704e648ddc3ffa0d2096b146d600',"
+ "'refs/heads/test', True)")
.addAll(checkFieldStarLark("res", "ref", "'refs/heads/test'"))
.addAll(checkFieldStarLark("res", "url",
"'https://github.com/google/example/git/refs/heads/test'"))
.addAll(checkFieldStarLark("res", "sha", "'e597746de9c1704e648ddc3ffa0d2096b146d600'"))
.build());
}
// A short branch name passed to update_reference must be expanded to refs/heads/<name>.
@Test
public void testFeedbackUpdateReferenceShortRef() throws Exception{
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.update_reference('e597746de9c1704e648ddc3ffa0d2096b146d600',"
+ " 'test', True)")
.addAll(checkFieldStarLark("res", "ref", "'refs/heads/test'"))
.addAll(checkFieldStarLark("res", "url",
"'https://github.com/google/example/git/refs/heads/test'"))
.addAll(checkFieldStarLark("res", "sha", "'e597746de9c1704e648ddc3ffa0d2096b146d600'"))
.build());
}
// delete_reference must issue a DELETE for the ref; the side-effecting assertion
// records that the mocked endpoint was actually hit.
@Test
public void testFeedbackDeleteReference() throws Exception{
AtomicBoolean called = new AtomicBoolean(false);
gitUtil.mockApi(eq("DELETE"), contains("/git/refs/heads/test"),
mockResponseWithStatus("", 202,
new MockRequestAssertion("Always true with side-effect",
s -> {
called.set(true);
return true;
})));
runFeedback(ImmutableList.of("ctx.destination.delete_reference('refs/heads/test')"));
assertThat(called.get()).isTrue();
}
// Deleting refs/heads/master is disallowed: the call must fail validation and
// the API endpoint must never be contacted.
@Test
public void testFeedbackDeleteReference_masterCheck() {
AtomicBoolean called = new AtomicBoolean(false);
gitUtil.mockApi(eq("DELETE"), contains("/git/refs/heads/master"),
mockResponseWithStatus("", 202,
new MockRequestAssertion("Always true with side-effect",
s -> {
called.set(true);
return true;
})));
ValidationException expected = assertThrows(ValidationException.class, () ->
runFeedback(ImmutableList.of("ctx.destination.delete_reference('refs/heads/master')")));
assertThat(expected).hasMessageThat().contains("Copybara doesn't allow to delete master");
assertThat(called.get()).isFalse();
}
/**
 * A test that uses get_reference.
 *
 * <p>Relies on the canned refs/heads/test response configured in {@code setup()}.
 */
@Test
public void testGetReference() throws Exception{
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_reference('refs/heads/test')")
.addAll(checkFieldStarLark("res", "ref", "'refs/heads/test'"))
.addAll(checkFieldStarLark("res", "url",
"'https://github.com/google/example/git/refs/heads/test'"))
.addAll(checkFieldStarLark("res", "sha", "'e597746de9c1704e648ddc3ffa0d2096b146d600'"))
.build());
}
/**
 * A test that uses get_references.
 *
 * <p>Relies on the canned list-refs response configured in {@code setup()}.
 */
@Test
public void testGetReferences() throws Exception{
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_references()")
.addAll(checkFieldStarLark("res[0]", "ref", "'refs/heads/test'"))
.addAll(checkFieldStarLark("res[0]", "url",
"'https://github.com/google/example/git/refs/heads/test'"))
.addAll(checkFieldStarLark("res[0]", "sha", "'e597746de9c1704e648ddc3ffa0d2096b146d600'"))
.build());
}
/**
 * A test that uses get_pull_requests.
 *
 * <p>Verifies that the query parameters (state, head prefix, sort, direction) are
 * forwarded and the returned PR fields are exposed to Starlark.
 */
@Test
public void testPullRequests() throws Exception {
gitUtil.mockApi(
anyString(),
contains(
"repos/google/example/pulls?per_page=100&state=open&head=head_&sort=created"
+ "&direction=asc"),
mockResponse(
toJson(
ImmutableList.of(
ImmutableMap.of(
"number",
12345,
"state",
"open",
"head",
ImmutableMap.of(
"label", "someuser:somebranch",
"sha", Strings.repeat("a", 40),
"ref", "somebranch"))))));
runFeedback(
ImmutableList.<String>builder()
.add("res = ctx.destination.get_pull_requests(state='OPEN', head_prefix='head_')")
.addAll(checkFieldStarLark("res[0]", "number", "12345"))
.addAll(checkFieldStarLark("res[0]", "state", "'OPEN'"))
.addAll(checkFieldStarLark("res[0]", "head.label", "'someuser:somebranch'"))
.addAll(checkFieldStarLark("res[0]", "head.sha", "'" + Strings.repeat("a", 40) + "'"))
.addAll(checkFieldStarLark("res[0]", "head.ref", "'somebranch'"))
.build());
}
/** A test that uses get_pull_requests, additionally passing a base-branch prefix. */
@Test
public void testPullRequests_withBase() throws Exception {
gitUtil.mockApi(
anyString(),
contains(
"repos/google/example/pulls?per_page=100&state=open&head=head_&base=base_&sort=created"
+ "&direction=asc"),
mockResponse(
toJson(
ImmutableList.of(
ImmutableMap.of(
"number",
12345,
"state",
"open",
"head",
ImmutableMap.of(
"label", "someuser:somebranch",
"sha", Strings.repeat("a", 40),
"ref", "somebranch"))))));
runFeedback(
ImmutableList.<String>builder()
.add(
"res = ctx.destination.get_pull_requests(state='OPEN', head_prefix='head_',"
+ " base_prefix='base_')")
.addAll(checkFieldStarLark("res[0]", "number", "12345"))
.addAll(checkFieldStarLark("res[0]", "state", "'OPEN'"))
.addAll(checkFieldStarLark("res[0]", "head.label", "'someuser:somebranch'"))
.addAll(checkFieldStarLark("res[0]", "head.sha", "'" + Strings.repeat("a", 40) + "'"))
.addAll(checkFieldStarLark("res[0]", "head.ref", "'somebranch'"))
.build());
}
// update_pull_request must POST {"state":"closed"} and expose the updated PR fields.
@Test
public void testUpdatePullRequest() throws Exception {
gitUtil.mockApi(eq("POST"), contains("repos/google/example/pulls/12345"),
mockResponseAndValidateRequest(toJson(
ImmutableMap.of(
"number", 12345,
"state", "closed",
"head", ImmutableMap.of(
"label", "someuser:somebranch",
"sha", Strings.repeat("a", 40),
"ref", "somebranch"
))), MockRequestAssertion.contains("{\"state\":\"closed\"}")));
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.update_pull_request(12345, state='CLOSED')")
.addAll(checkFieldStarLark("res", "number", "12345"))
.addAll(checkFieldStarLark("res", "state", "'CLOSED'"))
.addAll(checkFieldStarLark("res", "head.label", "'someuser:somebranch'"))
.addAll(checkFieldStarLark("res", "head.sha", "'" + Strings.repeat("a", 40) + "'"))
.addAll(checkFieldStarLark("res", "head.ref", "'somebranch'"))
.build());
}
// get_authenticated_user must expose the login of the authenticated user.
@Test
public void testGetAuthenticatedUser() throws Exception {
gitUtil.mockApi(eq("GET"), contains("user"),
mockResponse(toJson(ImmutableMap.of("login", "tester"))));
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_authenticated_user()")
.addAll(checkFieldStarLark("res", "login", "'tester'"))
.build());
}
// A 401 from the user endpoint must map to None rather than an error.
@Test
public void testGetAuthenticatedUser_not_authorized() throws Exception {
gitUtil.mockApi(eq("GET"), contains("user"), mockGitHubUnauthorized());
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.get_authenticated_user()")
.add("if res:\n"
+ " fail('Should return none')")
.build());
}
/**
 * A test that uses get_pull_requests.
 *
 * <p>An invalid head_prefix must be rejected before any API call is made.
 */
@Test
public void testPullRequests_badPrefix() throws Exception {
ValidationException expected = assertThrows(ValidationException.class, () ->
runFeedback(ImmutableList.of("ctx.destination.get_pull_requests(head_prefix = 'bad@*')")));
assertThat(expected).hasMessageThat().contains("'bad@*' is not a valid head_prefix");
}
// add_label must POST the labels to the issues endpoint of the PR number.
@Test
public void testAddlabel() throws Exception {
gitUtil.mockApi(
eq("POST"),
contains("12345/labels"),
mockResponse(
"[\n"
+ " {\n"
+ " \"id\": 123456,\n"
+ " \"node_id\": \"BASE64=\",\n"
+ " \"url\": \"https://api.github.com/repos/google/example/labels/run_kokoro\",\n"
+ " \"name\": \"run_kokoro\",\n"
+ " \"description\": \"Run me!\",\n"
+ " \"color\": \"ffffff\",\n"
+ " \"default\": true\n"
+ " }"
+ "]"
));
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.add_label(number = 12345, labels = ['run_kokoro'])")
.build());
verify(gitUtil.httpTransport())
.buildRequest(eq("POST"), contains("google/example/issues/12345/labels"));
}
// post_issue_comment must POST to the issue's comments endpoint.
@Test
public void testPostComment() throws Exception {
gitUtil.mockApi(eq("POST"), contains("12345/comments"), mockResponse("{}"));
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.post_issue_comment(number = 12345,"
+ " comment = 'This is a comment')")
.build());
verify(gitUtil.httpTransport())
.buildRequest(eq("POST"), contains("google/example/issues/12345/comments"));
}
// create_issue must POST to the issues endpoint and expose number/title of the result.
@Test
public void testCreateIssue() throws Exception {
gitUtil.mockApi(eq("POST"), contains("/issues"), mockResponse(toJson(
ImmutableMap.of(
"number", 123456,
"title", "This is an issue"
))));
runFeedback(ImmutableList.<String>builder()
.add("res = ctx.destination.create_issue("
+ "title='This is an issue', body='body', assignees=['foo'])")
.addAll(checkFieldStarLark("res", "number", "123456"))
.addAll(checkFieldStarLark("res", "title", "'This is an issue'"))
.build());
verify(gitUtil.httpTransport())
.buildRequest(eq("POST"), contains("google/example/issues"));
}
@Test
public void testListComments() throws Exception {
gitUtil.mockApi(
eq("GET"),
contains("/comments"),
mockResponse(toJson(ImmutableList.of(ImmutableMap.of("id", 1, "body", "Me too")))));
runFeedback(
ImmutableList.<String>builder()
.add("res = ctx.destination.list_issue_comments(number = 12345)[0]")
.addAll(checkFieldStarLark("res", "id", "1"))
.addAll(checkFieldStarLark("res", "body", "'Me too'"))
.build());
verify(gitUtil.httpTransport()).buildRequest(eq("GET"), contains("issues/12345/comments"));
}
@Test
public void testCreateRelease() throws Exception {
gitUtil.mockApi(eq("POST"), contains("/releases"), mockResponse(toJson(
ImmutableMap.of(
"id", 123456,
"zipball_url", "https://github.com/zip",
"tarball_url", "https://github.com/tar"
))));
runFeedback(
ImmutableList.<String>builder()
.add("req = ctx.destination.new_release_request(tag_name='v1.0.2').with_name('1.0.2')")
.add("res = ctx.destination.create_release(req)")
.addAll(checkFieldStarLark("res", "id", "123456"))
.addAll(checkFieldStarLark("res", "zip", "'https://github.com/zip'"))
.addAll(checkFieldStarLark("res", "tarball", "'https://github.com/tar'"))
.build());
verify(gitUtil.httpTransport())
.buildRequest(eq("POST"), contains("google/example/releases"));
}
@Test
public void testConfigCredentials() throws Exception {
EndpointProvider<GitHubEndPoint> endpoint = skylark.eval("e",
"e = git.github_api(url = 'https://github.com/google/example',"
+ " credentials = credentials.username_password(\n"
+ " credentials.static_value('test@example.com'),\n"
+ " credentials.static_secret('password', 'top_secret'))\n"
+ ")");
assertThat(endpoint.getEndpoint().describeCredentials()).isNotEmpty();
}
private String toJson(Object obj) throws IOException {
return GsonFactory.getDefaultInstance().toPrettyString(obj);
}
  // var, field, and value are all Starlark expressions.
  /**
   * Builds a two-line Starlark snippet asserting {@code var.field == value}, failing the
   * action with a descriptive message otherwise. The first three format arguments are
   * quoted via Starlark.repr so they appear as string literals inside the generated
   * fail() message; the last two are spliced in verbatim as live expressions.
   */
  private static ImmutableList<String> checkFieldStarLark(String var, String field, String value) {
    return ImmutableList.of(
        String.format("if %s.%s != %s:", var, field, value),
        String.format(
            " fail('unexpected value for '+%1$s+'.'+%2$s+' (expected '+%3$s+'): ' + %4$s.%5$s)",
            Starlark.repr(var), // string literal
            Starlark.repr(field), // string literal
            Starlark.repr(value), // string literal
            var, // expression
            field)); // expression
  }
private void runFeedback(ImmutableList<String> funBody) throws Exception {
ActionMigration test = feedback("def test_action(ctx):\n"
+ funBody.stream().map(s -> " " + s).collect(Collectors.joining("\n"))
+ "\n return ctx.success()\n");
test.run(workdir, ImmutableList.of("e597746de9c1704e648ddc3ffa0d2096b146d600"));
}
private ActionMigration feedback(String actionFunction) throws IOException, ValidationException {
String config =
actionFunction
+ "\n"
+ "core.feedback(\n"
+ " name = 'default',\n"
+ " origin = testing.dummy_trigger(),\n"
+ " destination = git.github_api(\n"
+ " url = 'https://github.com/google/example',\n"
+ " ),\n"
+ " actions = [test_action,],\n"
+ ")\n"
+ "\n";
System.err.println(config);
return (ActionMigration) skylark.loadConfig(config).getMigration("default");
}
}
|
apache/sqoop | 36,078 | src/java/org/apache/sqoop/manager/OracleManager.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.manager;
import static org.apache.sqoop.manager.JdbcDrivers.ORACLE;
import java.io.IOException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.TimeZone;
import java.util.TreeMap;
import org.apache.avro.LogicalType;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.sqoop.manager.oracle.OracleUtils;
import org.apache.sqoop.util.LoggingUtils;
import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.SqoopOptions.UpdateMode;
import org.apache.sqoop.mapreduce.ExportBatchOutputFormat;
import org.apache.sqoop.mapreduce.JdbcExportJob;
import org.apache.sqoop.mapreduce.JdbcUpsertExportJob;
import org.apache.sqoop.mapreduce.OracleUpsertOutputFormat;
import org.apache.sqoop.mapreduce.db.OracleDataDrivenDBInputFormat;
import org.apache.sqoop.util.ExportException;
import org.apache.sqoop.util.ImportException;
/**
* Manages connections to Oracle databases.
* Requires the Oracle JDBC driver.
*/
public class OracleManager
extends org.apache.sqoop.manager.GenericJdbcManager {
  public static final Log LOG = LogFactory.getLog(
      OracleManager.class.getName());
  /**
   * ORA-00942: Table or view does not exist. Indicates that the user does
   * not have permissions.
   */
  public static final int ERROR_TABLE_OR_VIEW_DOES_NOT_EXIST = 942;
  /**
   * This is a catalog view query to list the databases. For Oracle we map the
   * concept of a database to a schema, and a schema is identified by a user.
   * In order for the catalog view DBA_USERS be visible to the user who executes
   * this query, they must have the DBA privilege.
   */
  public static final String QUERY_LIST_DATABASES =
    "SELECT USERNAME FROM DBA_USERS";
  /**
   * Query to list all tables visible to the current user. Note that this list
   * does not identify the table owners which is required in order to
   * ensure that the table can be operated on for import/export purposes.
   */
  public static final String QUERY_LIST_TABLES =
    "SELECT TABLE_NAME FROM ALL_TABLES WHERE OWNER = ?";
  /**
   * Query to list all columns of the given table. Even if the user has the
   * privileges to access table objects from another schema, this query will
   * limit it to explore tables only from within the active schema.
   */
  public static final String QUERY_COLUMNS_FOR_TABLE =
          "SELECT COLUMN_NAME FROM ALL_TAB_COLUMNS WHERE "
        + "OWNER = ? AND TABLE_NAME = ? ORDER BY COLUMN_ID";
  /**
   * Query to find the primary key column name for a given table. This query
   * is restricted to the current schema.
   */
  public static final String QUERY_PRIMARY_KEY_FOR_TABLE =
    "SELECT ALL_CONS_COLUMNS.COLUMN_NAME FROM ALL_CONS_COLUMNS, "
    + "ALL_CONSTRAINTS WHERE ALL_CONS_COLUMNS.CONSTRAINT_NAME = "
    + "ALL_CONSTRAINTS.CONSTRAINT_NAME AND "
    + "ALL_CONSTRAINTS.CONSTRAINT_TYPE = 'P' AND "
    + "ALL_CONS_COLUMNS.TABLE_NAME = ? AND "
    + "ALL_CONS_COLUMNS.OWNER = ?";
  /**
   * Query to get the current user for the DB session. Used in case of
   * wallet logins.
   */
  public static final String QUERY_GET_SESSIONUSER =
    "SELECT USER FROM DUAL";
  // Configuration key to use to set the session timezone.
  // When absent, setSessionTimeZone() below falls back to "GMT".
  public static final String ORACLE_TIMEZONE_KEY = "oracle.sessionTimeZone";
  // Oracle XE does a poor job of releasing server-side resources for
  // closed connections. So we actually want to cache connections as
  // much as possible. This is especially important for JUnit tests which
  // may need to make 60 or more connections (serially), since each test
  // uses a different OracleManager instance.
  /**
   * A small per-process cache of open Oracle connections, keyed by
   * (connect string, username). At most one connection is retained per key;
   * extra recycled connections are closed. All public methods are
   * synchronized, so the cache is safe to share across threads.
   */
  private static class ConnCache {
    public static final Log LOG = LogFactory.getLog(ConnCache.class.getName());
    /** Map key: connect string plus (possibly null) username. */
    private static class CacheKey {
      private final String connectString;
      private final String username;
      public CacheKey(String connect, String user) {
        this.connectString = connect;
        this.username = user; // note: may be null.
      }
      @Override
      public boolean equals(Object o) {
        if (o instanceof CacheKey) {
          CacheKey k = (CacheKey) o;
          if (null == username) {
            return k.username == null && k.connectString.equals(connectString);
          } else {
            return k.username.equals(username)
                && k.connectString.equals(connectString);
          }
        } else {
          return false;
        }
      }
      @Override
      public int hashCode() {
        if (null == username) {
          return connectString.hashCode();
        } else {
          return username.hashCode() ^ connectString.hashCode();
        }
      }
      @Override
      public String toString() {
        return connectString + "/" + username;
      }
    }
    private Map<CacheKey, Connection> connectionMap;
    public ConnCache() {
      LOG.debug("Instantiated new connection cache.");
      connectionMap = new HashMap<CacheKey, Connection>();
    }
    /**
     * @return a Connection instance that can be used to connect to the
     * given database, if a previously-opened connection is available in
     * the cache. Returns null if none is available in the map.
     */
    public synchronized Connection getConnection(String connectStr,
        String username) throws SQLException {
      CacheKey key = new CacheKey(connectStr, username);
      Connection cached = connectionMap.get(key);
      if (null != cached) {
        // Always remove the entry: either we hand the connection out or
        // we discard it; it must not be shared while in use.
        connectionMap.remove(key);
        if (cached.isReadOnly()) {
          // Read-only mode? Don't want it. Closing makes isClosed() true
          // below, so the caller gets null and opens a fresh connection.
          cached.close();
        }
        if (cached.isClosed()) {
          // This connection isn't usable.
          return null;
        }
        cached.rollback(); // Reset any transaction state.
        cached.clearWarnings();
        LOG.debug("Got cached connection for " + key);
      }
      return cached;
    }
    /**
     * Returns a connection to the cache pool for future use. If a connection
     * is already cached for the connectstring/username pair, then this
     * connection is closed and discarded.
     */
    public synchronized void recycle(String connectStr, String username,
        Connection conn) throws SQLException {
      CacheKey key = new CacheKey(connectStr, username);
      Connection existing = connectionMap.get(key);
      if (null != existing) {
        // Cache is already full for this entry.
        LOG.debug("Discarding additional connection for " + key);
        conn.close();
        return;
      }
      // Put it in the map for later use.
      LOG.debug("Caching released connection for " + key);
      connectionMap.put(key, conn);
    }
    // Best-effort cleanup of any still-cached connections at GC time.
    @Override
    protected synchronized void finalize() throws Throwable {
      for (Connection c : connectionMap.values()) {
        c.close();
      }
      super.finalize();
    }
  }
private static final ConnCache CACHE;
static {
CACHE = new ConnCache();
}
  /** Creates a manager that connects via the Oracle JDBC driver using {@code opts}. */
  public OracleManager(final SqoopOptions opts) {
    super(ORACLE.getDriverClass(), opts);
  }
  /**
   * Releases open statements and then returns the open connection (if any)
   * to the shared cache instead of closing it; see the ConnCache comment for
   * why Oracle connections are pooled here.
   */
  public void close() throws SQLException {
    release(); // Release any open statements associated with the connection.
    if (hasOpenConnection()) {
      // Release our open connection back to the cache.
      CACHE.recycle(options.getConnectString(), options.getUsername(),
          getConnection());
      // false: do not close the connection we just handed to the cache.
      discardConnection(false);
    }
  }
protected String getColNamesQuery(String tableName) {
// SqlManager uses "tableName AS t" which doesn't work in Oracle.
String query = "SELECT t.* FROM " + escapeTableName(tableName)
+ " t WHERE 1=0";
LOG.debug("Using column names query: " + query);
return query;
}
/**
* Create a connection to the database; usually used only from within
* getConnection(), which enforces a singleton guarantee around the
* Connection object.
*
* Oracle-specific driver uses READ_COMMITTED which is the weakest
* semantics Oracle supports.
*/
protected Connection makeConnection() throws SQLException {
Connection connection;
String driverClass = getDriverClass();
try {
Class.forName(driverClass);
} catch (ClassNotFoundException cnfe) {
throw new RuntimeException("Could not load db driver class: "
+ driverClass);
}
String username = options.getUsername();
String password = options.getPassword();
String connectStr = options.getConnectString();
try {
connection = CACHE.getConnection(connectStr, username);
} catch (SQLException e) {
connection = null;
LOG.debug("Cached connecion has expired.");
}
if (null == connection) {
// Couldn't pull one from the cache. Get a new one.
LOG.debug("Creating a new connection for "
+ connectStr + ", using username: " + username);
Properties connectionParams = options.getConnectionParams();
if (connectionParams != null && connectionParams.size() > 0) {
LOG.debug("User specified connection params. "
+ "Using properties specific API for making connection.");
Properties props = new Properties();
if (username != null) {
props.put("user", username);
}
if (password != null) {
props.put("password", password);
}
props.putAll(connectionParams);
connection = DriverManager.getConnection(connectStr, props);
} else {
LOG.debug("No connection paramenters specified. "
+ "Using regular API for making connection.");
if (username == null) {
connection = DriverManager.getConnection(connectStr);
} else {
connection = DriverManager.getConnection(
connectStr, username, password);
}
}
}
// We only use this for metadata queries. Loosest semantics are okay.
connection.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
// Setting session time zone
setSessionTimeZone(connection);
// Rest of the Sqoop code expects that the connection will have be running
// without autoCommit, so we need to explicitly set it to false. This is
// usually done directly by SqlManager in the makeConnection method, but
// since we are overriding it, we have to do it ourselves.
connection.setAutoCommit(false);
return connection;
}
  /**
   * Returns the Oracle session user via {@code SELECT USER FROM DUAL}.
   * Used to resolve the effective schema when no username was supplied
   * (e.g. wallet logins). Commits on success, rolls back on failure, and
   * always closes the statement/result set.
   *
   * @throws RuntimeException if the session user cannot be determined.
   */
  public static String getSessionUser(Connection conn) {
    Statement stmt = null;
    ResultSet rset = null;
    String user = null;
    try {
      stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
              ResultSet.CONCUR_READ_ONLY);
      rset = stmt.executeQuery(QUERY_GET_SESSIONUSER);
      if (rset.next()) {
        user = rset.getString(1);
      }
      conn.commit();
    } catch (SQLException e) {
      try {
        conn.rollback();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
      }
    } finally {
      if (rset != null) {
        try {
          rset.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
        }
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close statement", ex);
        }
      }
    }
    // Any SQL failure above leaves user == null; surface it loudly here.
    if (user == null) {
      throw new RuntimeException("Unable to get current session user");
    }
    return user;
  }
/**
* Set session time zone.
* @param conn Connection object
* @throws SQLException instance
*/
private void setSessionTimeZone(Connection conn) throws SQLException {
// Need to use reflection to call the method setSessionTimeZone on the
// OracleConnection class because oracle specific java libraries are not
// accessible in this context.
Method methodSession;
Method methodDefaultTimezone;
try {
methodSession = conn.getClass().getMethod(
"setSessionTimeZone", new Class [] {String.class});
methodDefaultTimezone = conn.getClass().getMethod("setDefaultTimeZone", TimeZone.class);
} catch (Exception ex) {
LOG.error("Could not find method setSessionTimeZone in "
+ conn.getClass().getName(), ex);
// rethrow SQLException
throw new SQLException(ex);
}
// Need to set the time zone in order for Java to correctly access the
// column "TIMESTAMP WITH LOCAL TIME ZONE". The user may have set this in
// the configuration as 'oracle.sessionTimeZone'.
String clientTimeZoneStr = options.getConf().get(ORACLE_TIMEZONE_KEY,
"GMT");
TimeZone timeZone = TimeZone.getTimeZone(clientTimeZoneStr);
TimeZone.setDefault(timeZone);
try {
methodSession.setAccessible(true);
methodSession.invoke(conn, clientTimeZoneStr);
methodDefaultTimezone.setAccessible(true);
methodDefaultTimezone.invoke(conn, timeZone);
LOG.info("Time zone has been set to " + clientTimeZoneStr);
} catch (Exception ex) {
LOG.warn("Time zone " + clientTimeZoneStr
+ " could not be set on Oracle database.");
LOG.info("Setting default time zone: GMT");
try {
// Per the documentation at:
// http://download-west.oracle.com/docs/cd/B19306_01
// /server.102/b14225/applocaledata.htm#i637736
// The "GMT" timezone is guaranteed to exist in the available timezone
// regions, whereas others (e.g., "UTC") are not.
methodSession.invoke(conn, "GMT");
methodDefaultTimezone.invoke(conn, "GMT");
TimeZone.setDefault(TimeZone.getTimeZone("GMT"));
} catch (Exception ex2) {
LOG.error("Could not set time zone for oracle connection", ex2);
// rethrow SQLException
throw new SQLException(ex);
}
}
}
  /**
   * Runs a table import using the Oracle-specific data-driven input format,
   * then delegates the rest of the job setup to the generic implementation.
   */
  @Override
  public void importTable(
          org.apache.sqoop.manager.ImportJobContext context)
      throws IOException, ImportException {
    context.setConnManager(this);
    // Specify the Oracle-specific DBInputFormat for import.
    context.setInputFormat(OracleDataDrivenDBInputFormat.class);
    super.importTable(context);
  }
  /**
   * Export data stored in HDFS into a table in a database.
   * Uses batched INSERT statements (ExportBatchOutputFormat) for Oracle.
   */
  public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    context.setConnManager(this);
    JdbcExportJob exportJob = new JdbcExportJob(context,
            null, null, ExportBatchOutputFormat.class, getParquetJobConfigurator().createParquetExportJobConfigurator());
    exportJob.runExport();
  }
  @Override
  /**
   * {@inheritDoc}
   * Runs an update-or-insert export via OracleUpsertOutputFormat, which
   * relies on the column ordering produced by configureDbOutputColumns().
   */
  public void upsertTable(org.apache.sqoop.manager.ExportJobContext context)
      throws IOException, ExportException {
    context.setConnManager(this);
    JdbcUpsertExportJob exportJob =
        new JdbcUpsertExportJob(context, OracleUpsertOutputFormat.class, getParquetJobConfigurator().createParquetExportJobConfigurator());
    exportJob.runExport();
  }
  @Override
  /**
   * {@inheritDoc}
   * In upsert mode the output column order is: the update-key columns, then
   * all non-key columns (the UPDATE half), then every column again (the
   * INSERT half) — presumably matching the parameter order of the MERGE
   * statement built by OracleUpsertOutputFormat; confirm against that class.
   */
  public void configureDbOutputColumns(SqoopOptions options) {
    if (options.getUpdateMode() == UpdateMode.UpdateOnly) {
      super.configureDbOutputColumns(options);
    } else {
      // We're in upsert mode. We need to explicitly set
      // the database output column ordering in the codeGenerator.
      Set<String> updateKeys = new LinkedHashSet<String>();
      Set<String> updateKeysUppercase = new HashSet<String>();
      String updateKeyValue = options.getUpdateKeyCol();
      // StringTokenizer never yields empty tokens, so the error branch below
      // only fires for whitespace-only entries such as "a, ,b".
      StringTokenizer stok = new StringTokenizer(updateKeyValue, ",");
      while (stok.hasMoreTokens()) {
        String nextUpdateColumn = stok.nextToken().trim();
        if (nextUpdateColumn.length() > 0) {
          updateKeys.add(nextUpdateColumn);
          updateKeysUppercase.add(nextUpdateColumn.toUpperCase());
        } else {
          throw new RuntimeException("Invalid update key column value specified"
              + ": '" + updateKeyValue + "'");
        }
      }
      String [] allColNames = getColumnNames(options.getTableName());
      List<String> dbOutCols = new ArrayList<String>();
      dbOutCols.addAll(updateKeys);
      for (String col : allColNames) {
        if (!updateKeysUppercase.contains(col.toUpperCase())) {
          dbOutCols.add(col); // add update columns to the output order list.
        }
      }
      for (String col : allColNames) {
        dbOutCols.add(col); // add insert columns to the output order list.
      }
      options.setDbOutputColumns(dbOutCols.toArray(
          new String[dbOutCols.size()]));
    }
  }
@Override
public ResultSet readTable(String tableName, String[] columns)
throws SQLException {
if (columns == null) {
columns = getColumnNames(tableName);
}
StringBuilder sb = new StringBuilder();
sb.append("SELECT ");
boolean first = true;
for (String col : columns) {
if (!first) {
sb.append(", ");
}
sb.append(escapeColName(col));
first = false;
}
sb.append(" FROM ");
sb.append(escapeTableName(tableName));
String sqlCmd = sb.toString();
LOG.debug("Reading table with command: " + sqlCmd);
return execute(sqlCmd);
}
private Map<String, String> columnTypeNames;
/**
* Resolve a database-specific type to the Java type that should contain it.
* @param tableName table name
* @param colName column name
* @return the name of a Java type to hold the sql datatype, or null if none.
*/
private String toDbSpecificJavaType(String tableName, String colName) {
if (columnTypeNames == null) {
columnTypeNames = getColumnTypeNames(tableName, options.getCall(),
options.getSqlQuery());
}
String colTypeName = columnTypeNames.get(colName);
if (colTypeName != null) {
if (colTypeName.equalsIgnoreCase("BINARY_FLOAT")) {
return "Float";
}
if (colTypeName.equalsIgnoreCase("FLOAT")) {
return "Float";
}
if (colTypeName.equalsIgnoreCase("BINARY_DOUBLE")) {
return "Double";
}
if (colTypeName.equalsIgnoreCase("DOUBLE")) {
return "Double";
}
if (colTypeName.toUpperCase().startsWith("TIMESTAMP")) {
return "java.sql.Timestamp";
}
}
return null;
}
/**
* Resolve a database-specific type to the Hive type that should contain it.
* @param tableName table name
* @param colName column name
* @return the name of a Hive type to hold the sql datatype, or null if none.
*/
private String toDbSpecificHiveType(String tableName, String colName) {
if (columnTypeNames == null) {
columnTypeNames = getColumnTypeNames(tableName, options.getCall(),
options.getSqlQuery());
}
LOG.debug("Column Types and names returned = ("
+ StringUtils.join(columnTypeNames.keySet(), ",")
+ ")=>("
+ StringUtils.join(columnTypeNames.values(), ",")
+ ")");
String colTypeName = columnTypeNames.get(colName);
if (colTypeName != null) {
if (colTypeName.equalsIgnoreCase("BINARY_FLOAT")) {
return "FLOAT";
}
if (colTypeName.equalsIgnoreCase("BINARY_DOUBLE")) {
return "DOUBLE";
}
if (colTypeName.toUpperCase().startsWith("TIMESTAMP")) {
return "STRING";
}
}
return null;
}
/**
* Return java type for SQL type.
* @param tableName table name
* @param columnName column name
* @param sqlType sql type
* @return java type
*/
@Override
public String toJavaType(String tableName, String columnName, int sqlType) {
String javaType = super.toJavaType(tableName, columnName, sqlType);
if (javaType == null) {
javaType = toDbSpecificJavaType(tableName, columnName);
}
return javaType;
}
/**
* Return hive type for SQL type.
* @param tableName table name
* @param columnName column name
* @param sqlType sql data type
* @return hive type
*/
@Override
public String toHiveType(String tableName, String columnName, int sqlType) {
String hiveType = super.toHiveType(tableName, columnName, sqlType);
if (hiveType == null) {
hiveType = toDbSpecificHiveType(tableName, columnName);
}
return hiveType;
}
  // Best-effort safety net: recycle the open connection into the cache if
  // this manager is garbage-collected without an explicit close().
  @Override
  protected void finalize() throws Throwable {
    close();
    super.finalize();
  }
  /** Oracle requires a FROM clause, so the current timestamp is read from DUAL. */
  @Override
  protected String getCurTimestampQuery() {
    return "SELECT CURRENT_TIMESTAMP FROM dual";
  }
@Override
public String timestampToQueryString(Timestamp ts) {
return "TO_TIMESTAMP('" + ts + "', 'YYYY-MM-DD HH24:MI:SS.FF')";
}
@Override
public String datetimeToQueryString(String datetime, int columnType) {
if (columnType == Types.TIMESTAMP) {
return "TO_TIMESTAMP('" + datetime + "', 'YYYY-MM-DD HH24:MI:SS.FF')";
} else if (columnType == Types.DATE) {
// converting timestamp of the form 2012-11-11 11:11:11.00 to
// date of the form 2011:11:11 11:11:11
datetime = datetime.split("\\.")[0];
return "TO_DATE('" + datetime + "', 'YYYY-MM-DD HH24:MI:SS')";
} else {
String msg = "Column type is neither timestamp nor date!";
LOG.error(msg);
throw new RuntimeException(msg);
}
}
  /** Oracle exports may be routed through a staging table. */
  @Override
  public boolean supportsStagingForExport() {
    return true;
  }
/**
* The concept of database in Oracle is mapped to schemas. Each schema
* is identified by the corresponding username.
*/
@Override
public String[] listDatabases() {
Connection conn = null;
Statement stmt = null;
ResultSet rset = null;
List<String> databases = new ArrayList<String>();
try {
conn = getConnection();
stmt = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY,
ResultSet.CONCUR_READ_ONLY);
rset = stmt.executeQuery(QUERY_LIST_DATABASES);
while (rset.next()) {
databases.add(rset.getString(1));
}
conn.commit();
} catch (SQLException e) {
try {
conn.rollback();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
}
if (e.getErrorCode() == ERROR_TABLE_OR_VIEW_DOES_NOT_EXIST) {
LOG.error("The catalog view DBA_USERS was not found. "
+ "This may happen if the user does not have DBA privileges. "
+ "Please check privileges and try again.");
LOG.debug("Full trace for ORA-00942 exception", e);
} else {
LoggingUtils.logAll(LOG, "Failed to list databases", e);
}
} finally {
if (rset != null) {
try {
rset.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
}
}
if (stmt != null) {
try {
stmt.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to close statement", ex);
}
}
try {
close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
}
}
return databases.toArray(new String[databases.size()]);
}
@Override
public String[] listTables() {
Connection conn = null;
PreparedStatement pStmt = null;
ResultSet rset = null;
List<String> tables = new ArrayList<String>();
String tableOwner = null;
try {
conn = getConnection();
tableOwner = getSessionUser(conn);
pStmt = conn.prepareStatement(QUERY_LIST_TABLES,
ResultSet.TYPE_FORWARD_ONLY,
ResultSet.CONCUR_READ_ONLY);
pStmt.setString(1, tableOwner);
rset = pStmt.executeQuery();
while (rset.next()) {
tables.add(rset.getString(1));
}
conn.commit();
} catch (SQLException e) {
try {
conn.rollback();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
}
LoggingUtils.logAll(LOG, "Failed to list tables", e);
} finally {
if (rset != null) {
try {
rset.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
}
}
if (pStmt != null) {
try {
pStmt.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to close statement", ex);
}
}
try {
close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
}
}
return tables.toArray(new String[tables.size()]);
}
  /**
   * Returns the parameter names of a stored procedure, ordered by
   * ORDINAL_POSITION, skipping the return-value column. Positions missing
   * from the metadata are left as null entries.
   */
  @Override
  public String[] getColumnNamesForProcedure(String procedureName) {
    List<String> ret = new ArrayList<String>();
    try {
      DatabaseMetaData metaData = this.getConnection().getMetaData();
      ResultSet results = metaData.getProcedureColumns(null, null,
          procedureName, null);
      if (null == results) {
        return null;
      }
      try {
        while (results.next()) {
          if (results.getInt("COLUMN_TYPE")
              != DatabaseMetaData.procedureColumnReturn) {
            int index = results.getInt("ORDINAL_POSITION");
            if (index < 0) {
              continue; // actually the return type
            }
            // Pad with nulls so the name can be stored at its 1-based position.
            for (int i = ret.size(); i < index; ++i) {
              ret.add(null);
            }
            String name = results.getString("COLUMN_NAME");
            if (index == ret.size()) {
              ret.add(name);
            } else {
              ret.set(index, name);
            }
          }
        }
        String[] result = ret.toArray(new String[ret.size()]);
        LOG.debug("getColumnsNamesForProcedure returns "
            + StringUtils.join(ret, ","));
        return result;
      } finally {
        results.close();
        getConnection().commit();
      }
    } catch (SQLException e) {
      LoggingUtils.logAll(LOG, "Error reading procedure metadata: ", e);
      throw new RuntimeException("Can't fetch column names for procedure.", e);
    }
  }
@Override
public Map<String, Integer>
getColumnTypesForProcedure(String procedureName) {
Map<String, Integer> ret = new TreeMap<String, Integer>();
try {
DatabaseMetaData metaData = this.getConnection().getMetaData();
ResultSet results = metaData.getProcedureColumns(null, null,
procedureName, null);
if (null == results) {
return null;
}
try {
while (results.next()) {
if (results.getInt("COLUMN_TYPE")
!= DatabaseMetaData.procedureColumnReturn) {
int index = results.getInt("ORDINAL_POSITION");
if (index < 0) {
continue; // actually the return type
}
// we don't care if we get several rows for the
// same ORDINAL_POSITION (e.g. like H2 gives us)
// as we'll just overwrite the entry in the map:
ret.put(
results.getString("COLUMN_NAME"),
results.getInt("DATA_TYPE"));
}
}
LOG.debug("Columns returned = " + StringUtils.join(ret.keySet(), ","));
LOG.debug("Types returned = " + StringUtils.join(ret.values(), ","));
return ret.isEmpty() ? null : ret;
} finally {
results.close();
getConnection().commit();
}
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading primary key metadata: "
+ sqlException.toString(), sqlException);
return null;
}
}
@Override
public Map<String, String>
getColumnTypeNamesForProcedure(String procedureName) {
Map<String, String> ret = new TreeMap<String, String>();
try {
DatabaseMetaData metaData = this.getConnection().getMetaData();
ResultSet results = metaData.getProcedureColumns(null, null,
procedureName, null);
if (null == results) {
return null;
}
try {
while (results.next()) {
if (results.getInt("COLUMN_TYPE")
!= DatabaseMetaData.procedureColumnReturn) {
int index = results.getInt("ORDINAL_POSITION");
if (index < 0) {
continue; // actually the return type
}
// we don't care if we get several rows for the
// same ORDINAL_POSITION (e.g. like H2 gives us)
// as we'll just overwrite the entry in the map:
ret.put(
results.getString("COLUMN_NAME"),
results.getString("TYPE_NAME"));
}
}
LOG.debug("Columns returned = " + StringUtils.join(ret.keySet(), ","));
LOG.debug(
"Type names returned = " + StringUtils.join(ret.values(), ","));
return ret.isEmpty() ? null : ret;
} finally {
results.close();
getConnection().commit();
}
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading primary key metadata: "
+ sqlException.toString(), sqlException);
return null;
}
}
  /** Quotes a column identifier unless Oracle escaping is disabled in the options. */
  @Override
  public String escapeColName(String colName) {
    return OracleUtils.escapeIdentifier(colName, options.isOracleEscapingDisabled());
  }
  /** Quotes a table identifier unless Oracle escaping is disabled in the options. */
  @Override
  public String escapeTableName(String tableName) {
    return OracleUtils.escapeIdentifier(tableName, options.isOracleEscapingDisabled());
  }
  /** Table names are escaped in generated export statements for Oracle. */
  @Override
  public boolean escapeTableNameOnExport() {
    return true;
  }
@Override
public String[] getColumnNames(String tableName) {
Connection conn = null;
PreparedStatement pStmt = null;
ResultSet rset = null;
List<String> columns = new ArrayList<String>();
String tableOwner = null;
String shortTableName = tableName;
int qualifierIndex = tableName.indexOf('.');
if (qualifierIndex != -1) {
tableOwner = tableName.substring(0, qualifierIndex);
shortTableName = tableName.substring(qualifierIndex + 1);
}
try {
conn = getConnection();
if (tableOwner == null) {
tableOwner = getSessionUser(conn);
}
pStmt = conn.prepareStatement(QUERY_COLUMNS_FOR_TABLE,
ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
pStmt.setString(1, tableOwner);
pStmt.setString(2, shortTableName);
rset = pStmt.executeQuery();
while (rset.next()) {
columns.add(rset.getString(1));
}
conn.commit();
} catch (SQLException e) {
try {
conn.rollback();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
}
LoggingUtils.logAll(LOG, "Failed to list columns", e);
} finally {
if (rset != null) {
try {
rset.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
}
}
if (pStmt != null) {
try {
pStmt.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Failed to close statement", ex);
}
}
try {
close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
}
}
return filterSpecifiedColumnNames(columns.toArray(new String[columns.size()]));
}
  /**
   * Returns the primary-key column of {@code tableName}, or null if the
   * table has none. An "owner.table" qualifier is honored; otherwise the
   * session user is used as the owner. For multi-column primary keys only
   * the first column is returned (with a warning).
   */
  @Override
  public String getPrimaryKey(String tableName) {
    Connection conn = null;
    PreparedStatement pStmt = null;
    ResultSet rset = null;
    List<String> columns = new ArrayList<String>();
    String tableOwner = null;
    String shortTableName = tableName;
    int qualifierIndex = tableName.indexOf('.');
    if (qualifierIndex != -1) {
      tableOwner = tableName.substring(0, qualifierIndex);
      shortTableName = tableName.substring(qualifierIndex + 1);
    }
    try {
      conn = getConnection();
      if (tableOwner == null) {
        tableOwner = getSessionUser(conn);
      }
      pStmt = conn.prepareStatement(QUERY_PRIMARY_KEY_FOR_TABLE,
          ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
      pStmt.setString(1, shortTableName);
      pStmt.setString(2, tableOwner);
      rset = pStmt.executeQuery();
      while (rset.next()) {
        columns.add(rset.getString(1));
      }
      conn.commit();
    } catch (SQLException e) {
      try {
        // conn may be null if getConnection() failed; avoid an NPE here.
        if (conn != null) {
          conn.rollback();
        }
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Failed to rollback transaction", ex);
      }
      LoggingUtils.logAll(LOG, "Failed to list columns", e);
    } finally {
      if (rset != null) {
        try {
          rset.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close resultset", ex);
        }
      }
      if (pStmt != null) {
        try {
          pStmt.close();
        } catch (SQLException ex) {
          LoggingUtils.logAll(LOG, "Failed to close statement", ex);
        }
      }
      try {
        close();
      } catch (SQLException ex) {
        LoggingUtils.logAll(LOG, "Unable to discard connection", ex);
      }
    }
    if (columns.size() == 0) {
      // Table has no primary key
      return null;
    }
    if (columns.size() > 1) {
      // The primary key is multi-column primary key. Warn the user.
      // TODO select the appropriate column instead of the first column based
      // on the datatype - giving preference to numerics over other types.
      LOG.warn("The table " + tableName + " "
          + "contains a multi-column primary key. Sqoop will default to "
          + "the column " + columns.get(0) + " only for this job.");
    }
    return columns.get(0);
  }
@Override
public String getInputBoundsQuery(String splitByCol, String sanitizedQuery) {
/*
* The default input bounds query generated by DataDrivenImportJob
* is of the form:
* SELECT MIN(splitByCol), MAX(splitByCol) FROM (sanitizedQuery) AS t1
*
* This works for most databases but not Oracle since Oracle does not
* allow the use of "AS" to project the subquery as a table. Instead the
* correct format for use with Oracle is as follows:
* SELECT MIN(splitByCol), MAX(splitByCol) FROM (sanitizedQuery) t1
*/
return "SELECT MIN(" + splitByCol + "), MAX(" + splitByCol + ") FROM ("
+ sanitizedQuery + ") t1";
}
@Override
public LogicalType toAvroLogicalType(int sqlType, Integer precision, Integer scale) {
Configuration conf = options.getConf();
return OracleUtils.toAvroLogicalType(sqlType, precision, scale, conf);
}
}
|
googleapis/google-cloud-java | 36,275 | java-asset/proto-google-cloud-asset-v1/src/main/java/com/google/cloud/asset/v1/OutputConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/asset/v1/asset_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.asset.v1;
/**
*
*
* <pre>
* Output configuration for export assets destination.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.OutputConfig}
*/
public final class OutputConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.asset.v1.OutputConfig)
OutputConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use OutputConfig.newBuilder() to construct.
private OutputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private OutputConfig() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new OutputConfig();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_OutputConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_OutputConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.OutputConfig.class,
com.google.cloud.asset.v1.OutputConfig.Builder.class);
}
private int destinationCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object destination_;
public enum DestinationCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
GCS_DESTINATION(1),
BIGQUERY_DESTINATION(2),
DESTINATION_NOT_SET(0);
private final int value;
private DestinationCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static DestinationCase valueOf(int value) {
return forNumber(value);
}
public static DestinationCase forNumber(int value) {
switch (value) {
case 1:
return GCS_DESTINATION;
case 2:
return BIGQUERY_DESTINATION;
case 0:
return DESTINATION_NOT_SET;
default:
return null;
}
}
public int getNumber() {
return this.value;
}
};
public DestinationCase getDestinationCase() {
return DestinationCase.forNumber(destinationCase_);
}
public static final int GCS_DESTINATION_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*
* @return Whether the gcsDestination field is set.
*/
@java.lang.Override
public boolean hasGcsDestination() {
return destinationCase_ == 1;
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*
* @return The gcsDestination.
*/
@java.lang.Override
public com.google.cloud.asset.v1.GcsDestination getGcsDestination() {
if (destinationCase_ == 1) {
return (com.google.cloud.asset.v1.GcsDestination) destination_;
}
return com.google.cloud.asset.v1.GcsDestination.getDefaultInstance();
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
@java.lang.Override
public com.google.cloud.asset.v1.GcsDestinationOrBuilder getGcsDestinationOrBuilder() {
if (destinationCase_ == 1) {
return (com.google.cloud.asset.v1.GcsDestination) destination_;
}
return com.google.cloud.asset.v1.GcsDestination.getDefaultInstance();
}
public static final int BIGQUERY_DESTINATION_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*
* @return Whether the bigqueryDestination field is set.
*/
@java.lang.Override
public boolean hasBigqueryDestination() {
return destinationCase_ == 2;
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*
* @return The bigqueryDestination.
*/
@java.lang.Override
public com.google.cloud.asset.v1.BigQueryDestination getBigqueryDestination() {
if (destinationCase_ == 2) {
return (com.google.cloud.asset.v1.BigQueryDestination) destination_;
}
return com.google.cloud.asset.v1.BigQueryDestination.getDefaultInstance();
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
@java.lang.Override
public com.google.cloud.asset.v1.BigQueryDestinationOrBuilder getBigqueryDestinationOrBuilder() {
if (destinationCase_ == 2) {
return (com.google.cloud.asset.v1.BigQueryDestination) destination_;
}
return com.google.cloud.asset.v1.BigQueryDestination.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (destinationCase_ == 1) {
output.writeMessage(1, (com.google.cloud.asset.v1.GcsDestination) destination_);
}
if (destinationCase_ == 2) {
output.writeMessage(2, (com.google.cloud.asset.v1.BigQueryDestination) destination_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (destinationCase_ == 1) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
1, (com.google.cloud.asset.v1.GcsDestination) destination_);
}
if (destinationCase_ == 2) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
2, (com.google.cloud.asset.v1.BigQueryDestination) destination_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.asset.v1.OutputConfig)) {
return super.equals(obj);
}
com.google.cloud.asset.v1.OutputConfig other = (com.google.cloud.asset.v1.OutputConfig) obj;
if (!getDestinationCase().equals(other.getDestinationCase())) return false;
switch (destinationCase_) {
case 1:
if (!getGcsDestination().equals(other.getGcsDestination())) return false;
break;
case 2:
if (!getBigqueryDestination().equals(other.getBigqueryDestination())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
switch (destinationCase_) {
case 1:
hash = (37 * hash) + GCS_DESTINATION_FIELD_NUMBER;
hash = (53 * hash) + getGcsDestination().hashCode();
break;
case 2:
hash = (37 * hash) + BIGQUERY_DESTINATION_FIELD_NUMBER;
hash = (53 * hash) + getBigqueryDestination().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.OutputConfig parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.OutputConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.asset.v1.OutputConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.asset.v1.OutputConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Output configuration for export assets destination.
* </pre>
*
* Protobuf type {@code google.cloud.asset.v1.OutputConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.asset.v1.OutputConfig)
com.google.cloud.asset.v1.OutputConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_OutputConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_OutputConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.asset.v1.OutputConfig.class,
com.google.cloud.asset.v1.OutputConfig.Builder.class);
}
// Construct using com.google.cloud.asset.v1.OutputConfig.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (gcsDestinationBuilder_ != null) {
gcsDestinationBuilder_.clear();
}
if (bigqueryDestinationBuilder_ != null) {
bigqueryDestinationBuilder_.clear();
}
destinationCase_ = 0;
destination_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.asset.v1.AssetServiceProto
.internal_static_google_cloud_asset_v1_OutputConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.asset.v1.OutputConfig getDefaultInstanceForType() {
return com.google.cloud.asset.v1.OutputConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.asset.v1.OutputConfig build() {
com.google.cloud.asset.v1.OutputConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.asset.v1.OutputConfig buildPartial() {
com.google.cloud.asset.v1.OutputConfig result =
new com.google.cloud.asset.v1.OutputConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.asset.v1.OutputConfig result) {
int from_bitField0_ = bitField0_;
}
private void buildPartialOneofs(com.google.cloud.asset.v1.OutputConfig result) {
result.destinationCase_ = destinationCase_;
result.destination_ = this.destination_;
if (destinationCase_ == 1 && gcsDestinationBuilder_ != null) {
result.destination_ = gcsDestinationBuilder_.build();
}
if (destinationCase_ == 2 && bigqueryDestinationBuilder_ != null) {
result.destination_ = bigqueryDestinationBuilder_.build();
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.asset.v1.OutputConfig) {
return mergeFrom((com.google.cloud.asset.v1.OutputConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.asset.v1.OutputConfig other) {
if (other == com.google.cloud.asset.v1.OutputConfig.getDefaultInstance()) return this;
switch (other.getDestinationCase()) {
case GCS_DESTINATION:
{
mergeGcsDestination(other.getGcsDestination());
break;
}
case BIGQUERY_DESTINATION:
{
mergeBigqueryDestination(other.getBigqueryDestination());
break;
}
case DESTINATION_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getGcsDestinationFieldBuilder().getBuilder(), extensionRegistry);
destinationCase_ = 1;
break;
} // case 10
case 18:
{
input.readMessage(
getBigqueryDestinationFieldBuilder().getBuilder(), extensionRegistry);
destinationCase_ = 2;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int destinationCase_ = 0;
private java.lang.Object destination_;
public DestinationCase getDestinationCase() {
return DestinationCase.forNumber(destinationCase_);
}
public Builder clearDestination() {
destinationCase_ = 0;
destination_ = null;
onChanged();
return this;
}
private int bitField0_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.asset.v1.GcsDestination,
com.google.cloud.asset.v1.GcsDestination.Builder,
com.google.cloud.asset.v1.GcsDestinationOrBuilder>
gcsDestinationBuilder_;
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*
* @return Whether the gcsDestination field is set.
*/
@java.lang.Override
public boolean hasGcsDestination() {
return destinationCase_ == 1;
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*
* @return The gcsDestination.
*/
@java.lang.Override
public com.google.cloud.asset.v1.GcsDestination getGcsDestination() {
if (gcsDestinationBuilder_ == null) {
if (destinationCase_ == 1) {
return (com.google.cloud.asset.v1.GcsDestination) destination_;
}
return com.google.cloud.asset.v1.GcsDestination.getDefaultInstance();
} else {
if (destinationCase_ == 1) {
return gcsDestinationBuilder_.getMessage();
}
return com.google.cloud.asset.v1.GcsDestination.getDefaultInstance();
}
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
public Builder setGcsDestination(com.google.cloud.asset.v1.GcsDestination value) {
if (gcsDestinationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
destination_ = value;
onChanged();
} else {
gcsDestinationBuilder_.setMessage(value);
}
destinationCase_ = 1;
return this;
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
public Builder setGcsDestination(
com.google.cloud.asset.v1.GcsDestination.Builder builderForValue) {
if (gcsDestinationBuilder_ == null) {
destination_ = builderForValue.build();
onChanged();
} else {
gcsDestinationBuilder_.setMessage(builderForValue.build());
}
destinationCase_ = 1;
return this;
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
public Builder mergeGcsDestination(com.google.cloud.asset.v1.GcsDestination value) {
if (gcsDestinationBuilder_ == null) {
if (destinationCase_ == 1
&& destination_ != com.google.cloud.asset.v1.GcsDestination.getDefaultInstance()) {
destination_ =
com.google.cloud.asset.v1.GcsDestination.newBuilder(
(com.google.cloud.asset.v1.GcsDestination) destination_)
.mergeFrom(value)
.buildPartial();
} else {
destination_ = value;
}
onChanged();
} else {
if (destinationCase_ == 1) {
gcsDestinationBuilder_.mergeFrom(value);
} else {
gcsDestinationBuilder_.setMessage(value);
}
}
destinationCase_ = 1;
return this;
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
public Builder clearGcsDestination() {
if (gcsDestinationBuilder_ == null) {
if (destinationCase_ == 1) {
destinationCase_ = 0;
destination_ = null;
onChanged();
}
} else {
if (destinationCase_ == 1) {
destinationCase_ = 0;
destination_ = null;
}
gcsDestinationBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
public com.google.cloud.asset.v1.GcsDestination.Builder getGcsDestinationBuilder() {
return getGcsDestinationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
@java.lang.Override
public com.google.cloud.asset.v1.GcsDestinationOrBuilder getGcsDestinationOrBuilder() {
if ((destinationCase_ == 1) && (gcsDestinationBuilder_ != null)) {
return gcsDestinationBuilder_.getMessageOrBuilder();
} else {
if (destinationCase_ == 1) {
return (com.google.cloud.asset.v1.GcsDestination) destination_;
}
return com.google.cloud.asset.v1.GcsDestination.getDefaultInstance();
}
}
/**
*
*
* <pre>
* Destination on Cloud Storage.
* </pre>
*
* <code>.google.cloud.asset.v1.GcsDestination gcs_destination = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.asset.v1.GcsDestination,
com.google.cloud.asset.v1.GcsDestination.Builder,
com.google.cloud.asset.v1.GcsDestinationOrBuilder>
getGcsDestinationFieldBuilder() {
if (gcsDestinationBuilder_ == null) {
if (!(destinationCase_ == 1)) {
destination_ = com.google.cloud.asset.v1.GcsDestination.getDefaultInstance();
}
gcsDestinationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.asset.v1.GcsDestination,
com.google.cloud.asset.v1.GcsDestination.Builder,
com.google.cloud.asset.v1.GcsDestinationOrBuilder>(
(com.google.cloud.asset.v1.GcsDestination) destination_,
getParentForChildren(),
isClean());
destination_ = null;
}
destinationCase_ = 1;
onChanged();
return gcsDestinationBuilder_;
}
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.asset.v1.BigQueryDestination,
com.google.cloud.asset.v1.BigQueryDestination.Builder,
com.google.cloud.asset.v1.BigQueryDestinationOrBuilder>
bigqueryDestinationBuilder_;
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*
* @return Whether the bigqueryDestination field is set.
*/
@java.lang.Override
public boolean hasBigqueryDestination() {
return destinationCase_ == 2;
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*
* @return The bigqueryDestination.
*/
@java.lang.Override
public com.google.cloud.asset.v1.BigQueryDestination getBigqueryDestination() {
if (bigqueryDestinationBuilder_ == null) {
if (destinationCase_ == 2) {
return (com.google.cloud.asset.v1.BigQueryDestination) destination_;
}
return com.google.cloud.asset.v1.BigQueryDestination.getDefaultInstance();
} else {
if (destinationCase_ == 2) {
return bigqueryDestinationBuilder_.getMessage();
}
return com.google.cloud.asset.v1.BigQueryDestination.getDefaultInstance();
}
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
public Builder setBigqueryDestination(com.google.cloud.asset.v1.BigQueryDestination value) {
if (bigqueryDestinationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
destination_ = value;
onChanged();
} else {
bigqueryDestinationBuilder_.setMessage(value);
}
destinationCase_ = 2;
return this;
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
public Builder setBigqueryDestination(
com.google.cloud.asset.v1.BigQueryDestination.Builder builderForValue) {
if (bigqueryDestinationBuilder_ == null) {
destination_ = builderForValue.build();
onChanged();
} else {
bigqueryDestinationBuilder_.setMessage(builderForValue.build());
}
destinationCase_ = 2;
return this;
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
public Builder mergeBigqueryDestination(com.google.cloud.asset.v1.BigQueryDestination value) {
if (bigqueryDestinationBuilder_ == null) {
if (destinationCase_ == 2
&& destination_ != com.google.cloud.asset.v1.BigQueryDestination.getDefaultInstance()) {
destination_ =
com.google.cloud.asset.v1.BigQueryDestination.newBuilder(
(com.google.cloud.asset.v1.BigQueryDestination) destination_)
.mergeFrom(value)
.buildPartial();
} else {
destination_ = value;
}
onChanged();
} else {
if (destinationCase_ == 2) {
bigqueryDestinationBuilder_.mergeFrom(value);
} else {
bigqueryDestinationBuilder_.setMessage(value);
}
}
destinationCase_ = 2;
return this;
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
public Builder clearBigqueryDestination() {
if (bigqueryDestinationBuilder_ == null) {
if (destinationCase_ == 2) {
destinationCase_ = 0;
destination_ = null;
onChanged();
}
} else {
if (destinationCase_ == 2) {
destinationCase_ = 0;
destination_ = null;
}
bigqueryDestinationBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
public com.google.cloud.asset.v1.BigQueryDestination.Builder getBigqueryDestinationBuilder() {
return getBigqueryDestinationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
@java.lang.Override
public com.google.cloud.asset.v1.BigQueryDestinationOrBuilder
getBigqueryDestinationOrBuilder() {
if ((destinationCase_ == 2) && (bigqueryDestinationBuilder_ != null)) {
return bigqueryDestinationBuilder_.getMessageOrBuilder();
} else {
if (destinationCase_ == 2) {
return (com.google.cloud.asset.v1.BigQueryDestination) destination_;
}
return com.google.cloud.asset.v1.BigQueryDestination.getDefaultInstance();
}
}
/**
*
*
* <pre>
* Destination on BigQuery. The output table stores the fields in asset
* Protobuf as columns in BigQuery.
* </pre>
*
* <code>.google.cloud.asset.v1.BigQueryDestination bigquery_destination = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.asset.v1.BigQueryDestination,
com.google.cloud.asset.v1.BigQueryDestination.Builder,
com.google.cloud.asset.v1.BigQueryDestinationOrBuilder>
getBigqueryDestinationFieldBuilder() {
if (bigqueryDestinationBuilder_ == null) {
if (!(destinationCase_ == 2)) {
destination_ = com.google.cloud.asset.v1.BigQueryDestination.getDefaultInstance();
}
bigqueryDestinationBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.asset.v1.BigQueryDestination,
com.google.cloud.asset.v1.BigQueryDestination.Builder,
com.google.cloud.asset.v1.BigQueryDestinationOrBuilder>(
(com.google.cloud.asset.v1.BigQueryDestination) destination_,
getParentForChildren(),
isClean());
destination_ = null;
}
destinationCase_ = 2;
onChanged();
return bigqueryDestinationBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.asset.v1.OutputConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.asset.v1.OutputConfig)
private static final com.google.cloud.asset.v1.OutputConfig DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.asset.v1.OutputConfig();
}
public static com.google.cloud.asset.v1.OutputConfig getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Wire-format parser. On any failure it attaches the partially-built message as the
  // "unfinished message" so callers can inspect what was decoded before the error.
  private static final com.google.protobuf.Parser<OutputConfig> PARSER =
      new com.google.protobuf.AbstractParser<OutputConfig>() {
        @java.lang.Override
        public OutputConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Already the right exception type; just record the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: convert to the protobuf parse-failure exception.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Stream-level I/O failures are surfaced uniformly as parse failures.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the singleton wire-format parser for {@code OutputConfig}. */
  public static com.google.protobuf.Parser<OutputConfig> parser() {
    return PARSER;
  }

  /** Returns the same singleton parser; required by the {@code Message} interface. */
  @java.lang.Override
  public com.google.protobuf.Parser<OutputConfig> getParserForType() {
    return PARSER;
  }

  /** Returns the shared default instance; required by the {@code MessageOrBuilder} interface. */
  @java.lang.Override
  public com.google.cloud.asset.v1.OutputConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,503 | java-networkconnectivity/google-cloud-networkconnectivity/src/main/java/com/google/cloud/networkconnectivity/v1/stub/PolicyBasedRoutingServiceStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.networkconnectivity.v1.stub;
import static com.google.cloud.networkconnectivity.v1.PolicyBasedRoutingServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.networkconnectivity.v1.PolicyBasedRoutingServiceClient.ListPolicyBasedRoutesPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.cloud.networkconnectivity.v1.CreatePolicyBasedRouteRequest;
import com.google.cloud.networkconnectivity.v1.DeletePolicyBasedRouteRequest;
import com.google.cloud.networkconnectivity.v1.GetPolicyBasedRouteRequest;
import com.google.cloud.networkconnectivity.v1.ListPolicyBasedRoutesRequest;
import com.google.cloud.networkconnectivity.v1.ListPolicyBasedRoutesResponse;
import com.google.cloud.networkconnectivity.v1.OperationMetadata;
import com.google.cloud.networkconnectivity.v1.PolicyBasedRoute;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link PolicyBasedRoutingServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (networkconnectivity.googleapis.com) and default port (443) are
* used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getPolicyBasedRoute:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* PolicyBasedRoutingServiceStubSettings.Builder policyBasedRoutingServiceSettingsBuilder =
* PolicyBasedRoutingServiceStubSettings.newBuilder();
* policyBasedRoutingServiceSettingsBuilder
* .getPolicyBasedRouteSettings()
* .setRetrySettings(
* policyBasedRoutingServiceSettingsBuilder
* .getPolicyBasedRouteSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* PolicyBasedRoutingServiceStubSettings policyBasedRoutingServiceSettings =
* policyBasedRoutingServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for createPolicyBasedRoute:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* PolicyBasedRoutingServiceStubSettings.Builder policyBasedRoutingServiceSettingsBuilder =
* PolicyBasedRoutingServiceStubSettings.newBuilder();
 * TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
 *         RetrySettings.newBuilder()
 *             .setInitialRetryDelayDuration(Duration.ofMillis(500))
 *             .setRetryDelayMultiplier(1.5)
 *             .setMaxRetryDelayDuration(Duration.ofMillis(5000))
 *             .setTotalTimeoutDuration(Duration.ofHours(24))
 *             .build());
 * policyBasedRoutingServiceSettingsBuilder
 *     .createPolicyBasedRouteOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class PolicyBasedRoutingServiceStubSettings
    extends StubSettings<PolicyBasedRoutingServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Per-RPC call settings; each is frozen from the corresponding Builder field at build() time.
  private final PagedCallSettings<
          ListPolicyBasedRoutesRequest,
          ListPolicyBasedRoutesResponse,
          ListPolicyBasedRoutesPagedResponse>
      listPolicyBasedRoutesSettings;
  private final UnaryCallSettings<GetPolicyBasedRouteRequest, PolicyBasedRoute>
      getPolicyBasedRouteSettings;
  private final UnaryCallSettings<CreatePolicyBasedRouteRequest, Operation>
      createPolicyBasedRouteSettings;
  private final OperationCallSettings<
          CreatePolicyBasedRouteRequest, PolicyBasedRoute, OperationMetadata>
      createPolicyBasedRouteOperationSettings;
  private final UnaryCallSettings<DeletePolicyBasedRouteRequest, Operation>
      deletePolicyBasedRouteSettings;
  private final OperationCallSettings<DeletePolicyBasedRouteRequest, Empty, OperationMetadata>
      deletePolicyBasedRouteOperationSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;
  private final UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings;
  private final UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings;
  private final UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings;

  // Teaches the generic paging machinery how to read/write page tokens, page sizes and
  // resource lists for ListPolicyBasedRoutes.
  private static final PagedListDescriptor<
          ListPolicyBasedRoutesRequest, ListPolicyBasedRoutesResponse, PolicyBasedRoute>
      LIST_POLICY_BASED_ROUTES_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListPolicyBasedRoutesRequest, ListPolicyBasedRoutesResponse, PolicyBasedRoute>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListPolicyBasedRoutesRequest injectToken(
                ListPolicyBasedRoutesRequest payload, String token) {
              return ListPolicyBasedRoutesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListPolicyBasedRoutesRequest injectPageSize(
                ListPolicyBasedRoutesRequest payload, int pageSize) {
              return ListPolicyBasedRoutesRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListPolicyBasedRoutesRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListPolicyBasedRoutesResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<PolicyBasedRoute> extractResources(
                ListPolicyBasedRoutesResponse payload) {
              return payload.getPolicyBasedRoutesList();
            }
          };

  // Same paging contract for the mixin ListLocations RPC.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };

  // Builds the user-facing paged response future from a raw response future plus the
  // descriptor above.
  private static final PagedListResponseFactory<
          ListPolicyBasedRoutesRequest,
          ListPolicyBasedRoutesResponse,
          ListPolicyBasedRoutesPagedResponse>
      LIST_POLICY_BASED_ROUTES_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListPolicyBasedRoutesRequest,
              ListPolicyBasedRoutesResponse,
              ListPolicyBasedRoutesPagedResponse>() {
            @Override
            public ApiFuture<ListPolicyBasedRoutesPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListPolicyBasedRoutesRequest, ListPolicyBasedRoutesResponse> callable,
                ListPolicyBasedRoutesRequest request,
                ApiCallContext context,
                ApiFuture<ListPolicyBasedRoutesResponse> futureResponse) {
              PageContext<
                      ListPolicyBasedRoutesRequest, ListPolicyBasedRoutesResponse, PolicyBasedRoute>
                  pageContext =
                      PageContext.create(
                          callable, LIST_POLICY_BASED_ROUTES_PAGE_STR_DESC, request, context);
              return ListPolicyBasedRoutesPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to listPolicyBasedRoutes. */
  public PagedCallSettings<
          ListPolicyBasedRoutesRequest,
          ListPolicyBasedRoutesResponse,
          ListPolicyBasedRoutesPagedResponse>
      listPolicyBasedRoutesSettings() {
    return listPolicyBasedRoutesSettings;
  }

  /** Returns the object with the settings used for calls to getPolicyBasedRoute. */
  public UnaryCallSettings<GetPolicyBasedRouteRequest, PolicyBasedRoute>
      getPolicyBasedRouteSettings() {
    return getPolicyBasedRouteSettings;
  }

  /** Returns the object with the settings used for calls to createPolicyBasedRoute. */
  public UnaryCallSettings<CreatePolicyBasedRouteRequest, Operation>
      createPolicyBasedRouteSettings() {
    return createPolicyBasedRouteSettings;
  }

  /** Returns the object with the settings used for calls to createPolicyBasedRoute (LRO). */
  public OperationCallSettings<CreatePolicyBasedRouteRequest, PolicyBasedRoute, OperationMetadata>
      createPolicyBasedRouteOperationSettings() {
    return createPolicyBasedRouteOperationSettings;
  }

  /** Returns the object with the settings used for calls to deletePolicyBasedRoute. */
  public UnaryCallSettings<DeletePolicyBasedRouteRequest, Operation>
      deletePolicyBasedRouteSettings() {
    return deletePolicyBasedRouteSettings;
  }

  /** Returns the object with the settings used for calls to deletePolicyBasedRoute (LRO). */
  public OperationCallSettings<DeletePolicyBasedRouteRequest, Empty, OperationMetadata>
      deletePolicyBasedRouteOperationSettings() {
    return deletePolicyBasedRouteOperationSettings;
  }

  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }

  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }

  /** Returns the object with the settings used for calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() {
    return setIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() {
    return getIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to testIamPermissions. */
  public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }

  /**
   * Creates the transport-specific stub for these settings. Only the gRPC transport is
   * generated for this service; any other configured transport is rejected.
   */
  public PolicyBasedRoutingServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcPolicyBasedRoutingServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "networkconnectivity";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "networkconnectivity.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "networkconnectivity.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  // Header provider identifying library + transport versions for telemetry.
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(PolicyBasedRoutingServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  // Snapshot constructor: freezes every per-method builder into an immutable settings object.
  protected PolicyBasedRoutingServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    listPolicyBasedRoutesSettings = settingsBuilder.listPolicyBasedRoutesSettings().build();
    getPolicyBasedRouteSettings = settingsBuilder.getPolicyBasedRouteSettings().build();
    createPolicyBasedRouteSettings = settingsBuilder.createPolicyBasedRouteSettings().build();
    createPolicyBasedRouteOperationSettings =
        settingsBuilder.createPolicyBasedRouteOperationSettings().build();
    deletePolicyBasedRouteSettings = settingsBuilder.deletePolicyBasedRouteSettings().build();
    deletePolicyBasedRouteOperationSettings =
        settingsBuilder.deletePolicyBasedRouteOperationSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }

  /** Builder for PolicyBasedRoutingServiceStubSettings. */
  public static class Builder
      extends StubSettings.Builder<PolicyBasedRoutingServiceStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListPolicyBasedRoutesRequest,
            ListPolicyBasedRoutesResponse,
            ListPolicyBasedRoutesPagedResponse>
        listPolicyBasedRoutesSettings;
    private final UnaryCallSettings.Builder<GetPolicyBasedRouteRequest, PolicyBasedRoute>
        getPolicyBasedRouteSettings;
    private final UnaryCallSettings.Builder<CreatePolicyBasedRouteRequest, Operation>
        createPolicyBasedRouteSettings;
    private final OperationCallSettings.Builder<
            CreatePolicyBasedRouteRequest, PolicyBasedRoute, OperationMetadata>
        createPolicyBasedRouteOperationSettings;
    private final UnaryCallSettings.Builder<DeletePolicyBasedRouteRequest, Operation>
        deletePolicyBasedRouteSettings;
    private final OperationCallSettings.Builder<
            DeletePolicyBasedRouteRequest, Empty, OperationMetadata>
        deletePolicyBasedRouteOperationSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
    private final UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings;
    private final UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings;
    private final UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsSettings;

    // Retryable gRPC status codes keyed by the retry-policy names from the service config.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Timeout/backoff parameter sets keyed by the same policy names.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(10000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("no_retry_1_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      listPolicyBasedRoutesSettings =
          PagedCallSettings.newBuilder(LIST_POLICY_BASED_ROUTES_PAGE_STR_FACT);
      getPolicyBasedRouteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createPolicyBasedRouteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createPolicyBasedRouteOperationSettings = OperationCallSettings.newBuilder();
      deletePolicyBasedRouteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deletePolicyBasedRouteOperationSettings = OperationCallSettings.newBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      // Note: only the unary (initial-call) builders are listed here; the two
      // OperationCallSettings builders are configured separately in initDefaults.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listPolicyBasedRoutesSettings,
              getPolicyBasedRouteSettings,
              createPolicyBasedRouteSettings,
              deletePolicyBasedRouteSettings,
              listLocationsSettings,
              getLocationSettings,
              setIamPolicySettings,
              getIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }

    protected Builder(PolicyBasedRoutingServiceStubSettings settings) {
      super(settings);

      listPolicyBasedRoutesSettings = settings.listPolicyBasedRoutesSettings.toBuilder();
      getPolicyBasedRouteSettings = settings.getPolicyBasedRouteSettings.toBuilder();
      createPolicyBasedRouteSettings = settings.createPolicyBasedRouteSettings.toBuilder();
      createPolicyBasedRouteOperationSettings =
          settings.createPolicyBasedRouteOperationSettings.toBuilder();
      deletePolicyBasedRouteSettings = settings.deletePolicyBasedRouteSettings.toBuilder();
      deletePolicyBasedRouteOperationSettings =
          settings.deletePolicyBasedRouteOperationSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();
      setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
      getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
      testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listPolicyBasedRoutesSettings,
              getPolicyBasedRouteSettings,
              createPolicyBasedRouteSettings,
              deletePolicyBasedRouteSettings,
              listLocationsSettings,
              getLocationSettings,
              setIamPolicySettings,
              getIamPolicySettings,
              testIamPermissionsSettings);
    }

    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the service-config retry policies to every RPC and configures LRO polling.
    private static Builder initDefaults(Builder builder) {
      builder
          .listPolicyBasedRoutesSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getPolicyBasedRouteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      // Mutating RPCs are non-idempotent: no retries.
      builder
          .createPolicyBasedRouteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .deletePolicyBasedRouteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .setIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .testIamPermissionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      // LRO polling: start at 5s, grow 1.5x up to 45s between polls, give up after 5 minutes.
      builder
          .createPolicyBasedRouteOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<CreatePolicyBasedRouteRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(PolicyBasedRoute.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      builder
          .deletePolicyBasedRouteOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<DeletePolicyBasedRouteRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to listPolicyBasedRoutes. */
    public PagedCallSettings.Builder<
            ListPolicyBasedRoutesRequest,
            ListPolicyBasedRoutesResponse,
            ListPolicyBasedRoutesPagedResponse>
        listPolicyBasedRoutesSettings() {
      return listPolicyBasedRoutesSettings;
    }

    /** Returns the builder for the settings used for calls to getPolicyBasedRoute. */
    public UnaryCallSettings.Builder<GetPolicyBasedRouteRequest, PolicyBasedRoute>
        getPolicyBasedRouteSettings() {
      return getPolicyBasedRouteSettings;
    }

    /** Returns the builder for the settings used for calls to createPolicyBasedRoute. */
    public UnaryCallSettings.Builder<CreatePolicyBasedRouteRequest, Operation>
        createPolicyBasedRouteSettings() {
      return createPolicyBasedRouteSettings;
    }

    /** Returns the builder for the settings used for calls to createPolicyBasedRoute (LRO). */
    public OperationCallSettings.Builder<
            CreatePolicyBasedRouteRequest, PolicyBasedRoute, OperationMetadata>
        createPolicyBasedRouteOperationSettings() {
      return createPolicyBasedRouteOperationSettings;
    }

    /** Returns the builder for the settings used for calls to deletePolicyBasedRoute. */
    public UnaryCallSettings.Builder<DeletePolicyBasedRouteRequest, Operation>
        deletePolicyBasedRouteSettings() {
      return deletePolicyBasedRouteSettings;
    }

    /** Returns the builder for the settings used for calls to deletePolicyBasedRoute (LRO). */
    public OperationCallSettings.Builder<DeletePolicyBasedRouteRequest, Empty, OperationMetadata>
        deletePolicyBasedRouteOperationSettings() {
      return deletePolicyBasedRouteOperationSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    /** Returns the builder for the settings used for calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() {
      return setIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() {
      return getIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to testIamPermissions. */
    public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsSettings() {
      return testIamPermissionsSettings;
    }

    @Override
    public PolicyBasedRoutingServiceStubSettings build() throws IOException {
      return new PolicyBasedRoutingServiceStubSettings(this);
    }
  }
}
|
apache/iotdb | 36,447 | iotdb-core/datanode/src/test/java/org/apache/iotdb/db/storageengine/dataregion/compaction/cross/RewriteCrossSpaceCompactionWithFastPerformerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.db.storageengine.dataregion.compaction.cross;
import org.apache.iotdb.commons.concurrent.ExceptionalCountDownLatch;
import org.apache.iotdb.commons.exception.IllegalPathException;
import org.apache.iotdb.commons.exception.MetadataException;
import org.apache.iotdb.commons.path.AlignedFullPath;
import org.apache.iotdb.commons.path.IFullPath;
import org.apache.iotdb.commons.path.MeasurementPath;
import org.apache.iotdb.db.conf.IoTDBDescriptor;
import org.apache.iotdb.db.exception.StorageEngineException;
import org.apache.iotdb.db.queryengine.execution.fragment.FragmentInstanceContext;
import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeId;
import org.apache.iotdb.db.queryengine.plan.planner.plan.node.write.DeleteDataNode;
import org.apache.iotdb.db.storageengine.dataregion.DataRegion;
import org.apache.iotdb.db.storageengine.dataregion.compaction.AbstractCompactionTest;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.performer.impl.FastCompactionPerformer;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.task.CrossSpaceCompactionTask;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.utils.reader.IDataBlockReader;
import org.apache.iotdb.db.storageengine.dataregion.compaction.execute.utils.reader.SeriesDataBlockReader;
import org.apache.iotdb.db.storageengine.dataregion.compaction.utils.CompactionFileGeneratorUtils;
import org.apache.iotdb.db.storageengine.dataregion.flush.TsFileFlushPolicy;
import org.apache.iotdb.db.storageengine.dataregion.read.control.FileReaderManager;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileManager;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileResource;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.TsFileResourceStatus;
import org.apache.iotdb.db.storageengine.dataregion.tsfile.generator.TsFileNameGenerator;
import org.apache.iotdb.db.storageengine.dataregion.wal.recover.WALRecoverManager;
import org.apache.iotdb.db.utils.EnvironmentUtils;
import org.apache.tsfile.common.conf.TSFileDescriptor;
import org.apache.tsfile.common.constant.TsFileConstant;
import org.apache.tsfile.enums.TSDataType;
import org.apache.tsfile.exception.write.WriteProcessException;
import org.apache.tsfile.file.metadata.IDeviceID;
import org.apache.tsfile.read.common.block.TsBlock;
import org.apache.tsfile.utils.Pair;
import org.apache.tsfile.utils.TsFileGeneratorUtils;
import org.apache.tsfile.write.schema.IMeasurementSchema;
import org.apache.tsfile.write.schema.MeasurementSchema;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.apache.iotdb.commons.conf.IoTDBConstant.CROSS_COMPACTION_TMP_FILE_SUFFIX;
import static org.apache.iotdb.commons.conf.IoTDBConstant.PATH_SEPARATOR;
import static org.junit.Assert.assertEquals;
public class RewriteCrossSpaceCompactionWithFastPerformerTest extends AbstractCompactionTest {
private static final Logger LOGGER =
LoggerFactory.getLogger(RewriteCrossSpaceCompactionWithFastPerformerTest.class);
private final String oldThreadName = Thread.currentThread().getName();
  @Before
  public void setUp()
      throws IOException, WriteProcessException, MetadataException, InterruptedException {
    // Base-class setup must run first: it prepares the test directories and resource lists.
    super.setUp();
    // NOTE(review): presumably required so WAL recovery does not block while the compaction
    // test manipulates data regions -- confirm against WALRecoverManager's contract.
    WALRecoverManager.getInstance().setAllDataRegionScannedLatch(new ExceptionalCountDownLatch(1));
    // Small target chunk size so the generated test data is split into many chunks.
    IoTDBDescriptor.getInstance().getConfig().setTargetChunkSize(1024);
    // Run under a compaction-worker-style thread name; restored in tearDown().
    Thread.currentThread().setName("pool-1-IoTDB-Compaction-Worker-1");
  }
  @After
  public void tearDown() throws IOException, StorageEngineException {
    super.tearDown();
    // Restore the thread name that setUp() overrode so later tests see the original name.
    Thread.currentThread().setName(oldThreadName);
    // Close every cached TsFile reader opened during the test.
    FileReaderManager.getInstance().closeAndRemoveAllOpenedReaders();
  }
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
* third and forth file has d0 ~ d3 and s0 ~ S4,time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
* forth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d0.s0, d0.s1, d2.s4 and d3.s4 is deleted in each file.
*/
@Test
public void testAlignedCrossSpaceCompactionWithAllDataDeletedInTimeseries() throws Exception {
TSFileDescriptor.getInstance().getConfig().setMaxNumberOfPointsInPage(30);
registerTimeseriesInMManger(4, 5, true);
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
// generate mods file
List<String> seriesPaths = new ArrayList<>();
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ TsFileGeneratorUtils.getAlignDeviceOffset()
+ PATH_SEPARATOR
+ "s0");
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ TsFileGeneratorUtils.getAlignDeviceOffset()
+ PATH_SEPARATOR
+ "s1");
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 2)
+ PATH_SEPARATOR
+ "s4");
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s4");
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
seqResources,
unseqResources,
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 0)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j == 4)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j == 4)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if (i == TsFileGeneratorUtils.getAlignDeviceOffset()) {
assertEquals(800, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
List<TsFileResource> targetResources =
CompactionFileGeneratorUtils.getCrossCompactionTargetTsFileResources(seqResources);
TsFileManager tsFileManager =
new TsFileManager(COMPACTION_TEST_SG, "0", STORAGE_GROUP_DIR.getPath());
tsFileManager.addAll(seqResources, true);
tsFileManager.addAll(unseqResources, false);
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
tsFileManager,
seqResources,
unseqResources,
new FastCompactionPerformer(true),
0,
0);
task.start();
for (TsFileResource resource : seqResources) {
resource.resetModFile();
Assert.assertFalse(resource.anyModFileExists());
}
for (TsFileResource resource : unseqResources) {
resource.resetModFile();
Assert.assertFalse(resource.anyModFileExists());
}
for (TsFileResource resource : targetResources) {
resource.setFile(
new File(
resource
.getTsFilePath()
.replace(CROSS_COMPACTION_TMP_FILE_SUFFIX, TsFileConstant.TSFILE_SUFFIX)));
resource.resetModFile();
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(4, resource.getAllModEntries().size());
}
FileReaderManager.getInstance().closeAndRemoveAllOpenedReaders();
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
tsFileManager.getTsFileList(true),
new ArrayList<>(),
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 0)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() && j == 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j == 4)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j == 4)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if (i == TsFileGeneratorUtils.getAlignDeviceOffset()) {
assertEquals(800, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
}
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
* third and forth file has d0 ~ d3 and s0 ~ S4,time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
* forth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d0, d1 and d2 is deleted in each file. The first target file is empty.
*/
@Test
public void testAlignedCrossSpaceCompactionWithAllDataDeletedInOneTargetFile() throws Exception {
TSFileDescriptor.getInstance().getConfig().setMaxNumberOfPointsInPage(30);
registerTimeseriesInMManger(4, 5, true);
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
// generate mods file
List<String> seriesPaths = new ArrayList<>();
for (int i = 0; i < 5; i++) {
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ TsFileGeneratorUtils.getAlignDeviceOffset()
+ PATH_SEPARATOR
+ "s"
+ i);
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 1)
+ PATH_SEPARATOR
+ "s"
+ i);
seriesPaths.add(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 2)
+ PATH_SEPARATOR
+ "s"
+ i);
seriesPaths.add(COMPACTION_TEST_SG + PATH_SEPARATOR + "d0" + PATH_SEPARATOR + "s" + i);
seriesPaths.add(COMPACTION_TEST_SG + PATH_SEPARATOR + "d1" + PATH_SEPARATOR + "s" + i);
seriesPaths.add(COMPACTION_TEST_SG + PATH_SEPARATOR + "d2" + PATH_SEPARATOR + "s" + i);
}
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, false);
generateModsFile(seriesPaths, seqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
generateModsFile(seriesPaths, unseqResources, Long.MIN_VALUE, Long.MAX_VALUE, true);
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
seqResources,
unseqResources,
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
if (i == 0 || i == 1 || i == 2) {
assertEquals(0, count);
}
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset())
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
List<TsFileResource> targetResources =
CompactionFileGeneratorUtils.getCrossCompactionTargetTsFileResources(seqResources);
TsFileManager tsFileManager =
new TsFileManager(COMPACTION_TEST_SG, "0", STORAGE_GROUP_DIR.getPath());
tsFileManager.addAll(seqResources, true);
tsFileManager.addAll(unseqResources, false);
for (TsFileResource resource : seqResources) {
Assert.assertTrue(resource.anyModFileExists());
}
for (TsFileResource resource : unseqResources) {
Assert.assertTrue(resource.anyModFileExists());
}
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
tsFileManager,
seqResources,
unseqResources,
new FastCompactionPerformer(true),
0,
0);
task.start();
for (TsFileResource resource : targetResources) {
resource.setFile(
new File(
resource
.getTsFilePath()
.replace(CROSS_COMPACTION_TMP_FILE_SUFFIX, TsFileConstant.TSFILE_SUFFIX)));
if (!resource.getTsFile().exists()) {
continue;
}
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(30, resource.getAllModEntries().size());
}
FileReaderManager.getInstance().closeAndRemoveAllOpenedReaders();
for (int i = TsFileGeneratorUtils.getAlignDeviceOffset();
i < TsFileGeneratorUtils.getAlignDeviceOffset() + 4;
i++) {
for (int j = 0; j < 5; j++) {
List<IMeasurementSchema> schemas = new ArrayList<>();
schemas.add(new MeasurementSchema("s" + j, TSDataType.INT64));
IFullPath path =
new AlignedFullPath(
IDeviceID.Factory.DEFAULT_FACTORY.create(
COMPACTION_TEST_SG + PATH_SEPARATOR + "d" + i),
Collections.singletonList("s" + j),
schemas);
IDataBlockReader tsFilesReader =
new SeriesDataBlockReader(
path,
FragmentInstanceContext.createFragmentInstanceContextForCompaction(
EnvironmentUtils.TEST_QUERY_CONTEXT.getQueryId()),
tsFileManager.getTsFileList(true),
new ArrayList<>(),
true);
int count = 0;
while (tsFilesReader.hasNextBatch()) {
TsBlock batchData = tsFilesReader.nextBatch();
for (int readIndex = 0, size = batchData.getPositionCount();
readIndex < size;
readIndex++) {
long currentTime = batchData.getTimeByIndex(readIndex);
long currentValue = batchData.getColumn(0).getLong(readIndex);
if (i == TsFileGeneratorUtils.getAlignDeviceOffset()
&& ((450 <= currentTime && currentTime < 550)
|| (550 <= currentTime && currentTime < 650))) {
assertEquals(currentTime + 20000, currentValue);
} else if ((i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4)
&& ((20 <= currentTime && currentTime < 220)
|| (250 <= currentTime && currentTime < 450)
|| (480 <= currentTime && currentTime < 680))) {
assertEquals(currentTime + 10000, currentValue);
} else {
assertEquals(currentTime, currentValue);
}
count++;
}
}
tsFilesReader.close();
if (i == 0 || i == 1 || i == 2) {
assertEquals(0, count);
}
if ((i == TsFileGeneratorUtils.getAlignDeviceOffset())
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1)
|| (i == TsFileGeneratorUtils.getAlignDeviceOffset() + 2)) {
assertEquals(0, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 2 && j < 3) {
assertEquals(1280, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j < 4) {
assertEquals(1230, count);
} else if ((i == TsFileGeneratorUtils.getAlignDeviceOffset() + 1 && j == 4)) {
assertEquals(600, count);
} else if (i < TsFileGeneratorUtils.getAlignDeviceOffset() + 3 && j < 4) {
assertEquals(1200, count);
} else {
assertEquals(600, count);
}
}
}
}
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
* third and forth file has d0 ~ d3 and s0 ~ S4,time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
* forth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d3.s0 is deleted. Test when there is a deletion to the file before compaction,
* then comes to a deletion during compaction.
*/
@Test
public void testOneDeletionDuringCompaction() throws Exception {
DataRegion vsgp =
new DataRegion(
STORAGE_GROUP_DIR.getPath(),
"0",
new TsFileFlushPolicy.DirectFlushPolicy(),
COMPACTION_TEST_SG);
registerTimeseriesInMManger(4, 5, true);
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
vsgp.getTsFileResourceManager().addAll(seqResources, true);
vsgp.getTsFileResourceManager().addAll(unseqResources, false);
MeasurementPath path =
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0");
DeleteDataNode deleteDataNode =
new DeleteDataNode(new PlanNodeId("1"), Collections.singletonList(path), 0, 1000);
deleteDataNode.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode);
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
vsgp.getTsFileResourceManager(),
seqResources,
unseqResources,
new FastCompactionPerformer(true),
0,
0);
task.setSourceFilesToCompactionCandidate();
seqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
unseqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
// delete data in source file during compaction
DeleteDataNode deleteDataNode2 =
new DeleteDataNode(new PlanNodeId("2"), Collections.singletonList(path), 0, 1200);
deleteDataNode2.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode2);
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource resource = seqResources.get(i);
resource.resetModFile();
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else if (i == 2) {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(2, resource.getAllModEntries().size());
} else {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(1, resource.getAllModEntries().size());
}
}
for (TsFileResource resource : unseqResources) {
resource.resetModFile();
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
}
task.start();
for (TsFileResource resource : seqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
for (TsFileResource resource : unseqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource seqResource = seqResources.get(i);
TsFileResource resource =
new TsFileResource(
TsFileNameGenerator.increaseCrossCompactionCnt(seqResource.getTsFile()));
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
}
}
}
/**
* Total 4 seq files and 5 unseq files, each file has different aligned timeseries.
*
* <p>Seq files<br>
* first and second file has d0 ~ d1 and s0 ~ s2, time range is 0 ~ 299 and 350 ~ 649, value range
* is 0 ~ 299 and 350 ~ 649.<br>
* third and forth file has d0 ~ d3 and s0 ~ S4,time range is 700 ~ 999 and 1050 ~ 1349, value
* range is 700 ~ 999 and 1050 ~ 1349.<br>
*
* <p>UnSeq files<br>
* first, second and third file has d0 ~ d2 and s0 ~ s3, time range is 20 ~ 219, 250 ~ 449 and 480
* ~ 679, value range is 10020 ~ 10219, 10250 ~ 10449 and 10480 ~ 10679.<br>
* forth and fifth file has d0 and s0 ~ s4, time range is 450 ~ 549 and 550 ~ 649, value range is
* 20450 ~ 20549 and 20550 ~ 20649.
*
* <p>The data of d3.s0 is deleted. Test when there is a deletion to the file before compaction,
* then comes to serveral deletions during compaction.
*/
@Test
public void testSeveralDeletionsDuringCompaction() throws Exception {
DataRegion vsgp =
new DataRegion(
STORAGE_GROUP_DIR.getPath(),
"0",
new TsFileFlushPolicy.DirectFlushPolicy(),
COMPACTION_TEST_SG);
registerTimeseriesInMManger(4, 5, true);
createFiles(2, 2, 3, 300, 0, 0, 50, 50, true, true);
createFiles(2, 4, 5, 300, 700, 700, 50, 50, true, true);
createFiles(3, 3, 4, 200, 20, 10020, 30, 30, true, false);
createFiles(2, 1, 5, 100, 450, 20450, 0, 0, true, false);
vsgp.getTsFileResourceManager().addAll(seqResources, true);
vsgp.getTsFileResourceManager().addAll(unseqResources, false);
MeasurementPath path =
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0");
DeleteDataNode deleteDataNode =
new DeleteDataNode(new PlanNodeId("1"), Collections.singletonList(path), 0, 1000);
deleteDataNode.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode);
CrossSpaceCompactionTask task =
new CrossSpaceCompactionTask(
0,
vsgp.getTsFileResourceManager(),
seqResources,
unseqResources,
new FastCompactionPerformer(true),
0,
0);
task.setSourceFilesToCompactionCandidate();
seqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
unseqResources.forEach(f -> f.setStatus(TsFileResourceStatus.COMPACTING));
// delete data in source file during compaction
DeleteDataNode deleteDataNode2 =
new DeleteDataNode(new PlanNodeId("2"), Collections.singletonList(path), 0, 1200);
deleteDataNode2.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode2);
DeleteDataNode deleteDataNode3 =
new DeleteDataNode(new PlanNodeId("3"), Collections.singletonList(path), 0, 1800);
deleteDataNode3.setSearchIndex(0);
vsgp.deleteByDevice(
new MeasurementPath(
COMPACTION_TEST_SG
+ PATH_SEPARATOR
+ "d"
+ (TsFileGeneratorUtils.getAlignDeviceOffset() + 3)
+ PATH_SEPARATOR
+ "s0"),
deleteDataNode3);
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource resource = seqResources.get(i);
resource.resetModFile();
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else if (i == 2) {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(3, resource.getAllModEntries().size());
} else {
Assert.assertTrue(resource.anyModFileExists());
Assert.assertEquals(2, resource.getAllModEntries().size());
}
}
for (TsFileResource resource : unseqResources) {
resource.resetModFile();
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
}
task.start();
for (TsFileResource resource : seqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
for (TsFileResource resource : unseqResources) {
Assert.assertFalse(resource.getTsFile().exists());
Assert.assertFalse(resource.anyModFileExists());
Assert.assertFalse(resource.getCompactionModFile().exists());
}
for (int i = 0; i < seqResources.size(); i++) {
TsFileResource seqResource = seqResources.get(i);
TsFileResource resource =
new TsFileResource(
TsFileNameGenerator.increaseCrossCompactionCnt(seqResource.getTsFile()));
if (i < 2) {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
} else {
Assert.assertFalse(resource.getCompactionModFile().exists());
Assert.assertFalse(resource.anyModFileExists());
}
}
}
private void generateModsFile(
List<String> seriesPaths,
List<TsFileResource> resources,
long startValue,
long endValue,
boolean isCompactionMods)
throws IllegalPathException, IOException {
for (TsFileResource resource : resources) {
Map<String, Pair<Long, Long>> deleteMap = new HashMap<>();
for (String path : seriesPaths) {
deleteMap.put(path, new Pair<>(startValue, endValue));
}
CompactionFileGeneratorUtils.generateMods(deleteMap, resource, isCompactionMods);
}
}
}
|
googleapis/google-cloud-java | 36,240 | java-aiplatform/google-cloud-aiplatform/src/test/java/com/google/cloud/aiplatform/v1/GenAiTuningServiceClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1;
import static com.google.cloud.aiplatform.v1.GenAiTuningServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.aiplatform.v1.GenAiTuningServiceClient.ListTuningJobsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.GetPolicyOptions;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import com.google.rpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Unit tests for {@link GenAiTuningServiceClient}, exercised against in-process gRPC mocks.
 *
 * <p>Each RPC is covered by a success-path test (response round-trip, captured request fields,
 * API client header) and an INVALID_ARGUMENT error-path test. The repeated builder boilerplate
 * from the generated tests is factored into private fixture/assertion helpers; test behavior is
 * unchanged.
 */
@Generated("by gapic-generator-java")
public class GenAiTuningServiceClientTest {
  private static MockGenAiTuningService mockGenAiTuningService;
  private static MockIAMPolicy mockIAMPolicy;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  private LocalChannelProvider channelProvider;
  private GenAiTuningServiceClient client;

  @BeforeClass
  public static void startStaticServer() {
    mockGenAiTuningService = new MockGenAiTuningService();
    mockLocations = new MockLocations();
    mockIAMPolicy = new MockIAMPolicy();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockGenAiTuningService, mockLocations, mockIAMPolicy));
    mockServiceHelper.start();
  }

  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    GenAiTuningServiceSettings settings =
        GenAiTuningServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = GenAiTuningServiceClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  // ---------------------------------------------------------------------------
  // Shared fixtures and assertion helpers (extracted from duplicated generated
  // builder code; values are byte-identical to the originals).
  // ---------------------------------------------------------------------------

  /** Fully-populated {@link TuningJob} fixture shared by the tuning-job tests. */
  private static TuningJob sampleTuningJob() {
    return TuningJob.newBuilder()
        .setName(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]").toString())
        .setTunedModelDisplayName("tunedModelDisplayName759401232")
        .setDescription("description-1724546052")
        .setState(JobState.forNumber(0))
        .setCreateTime(Timestamp.newBuilder().build())
        .setStartTime(Timestamp.newBuilder().build())
        .setEndTime(Timestamp.newBuilder().build())
        .setUpdateTime(Timestamp.newBuilder().build())
        .setError(Status.newBuilder().build())
        .putAllLabels(new HashMap<String, String>())
        .setExperiment(
            ContextName.of("[PROJECT]", "[LOCATION]", "[METADATA_STORE]", "[CONTEXT]").toString())
        .setTunedModel(TunedModel.newBuilder().build())
        .setTuningDataStats(TuningDataStats.newBuilder().build())
        .setEncryptionSpec(EncryptionSpec.newBuilder().build())
        .setServiceAccount("serviceAccount1079137720")
        .build();
  }

  /** INVALID_ARGUMENT failure injected into the mock services for error-path tests. */
  private static StatusRuntimeException invalidArgumentException() {
    return new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  }

  /** Wraps {@code response} in a completed long-running {@link Operation}. */
  private static Operation doneOperation(String name, AbstractMessage response) {
    return Operation.newBuilder()
        .setName(name)
        .setDone(true)
        .setResponse(Any.pack(response))
        .build();
  }

  /** Sample IAM {@link Policy} used by the set/get IAM policy tests. */
  private static Policy samplePolicy() {
    return Policy.newBuilder()
        .setVersion(351608024)
        .addAllBindings(new ArrayList<Binding>())
        .addAllAuditConfigs(new ArrayList<AuditConfig>())
        .setEtag(ByteString.EMPTY)
        .build();
  }

  /** Endpoint resource name used by the IAM tests. */
  private static String endpointResourceName() {
    return EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]")
        .toString();
  }

  /** ListLocations request fixture shared by the location tests. */
  private static ListLocationsRequest sampleListLocationsRequest() {
    return ListLocationsRequest.newBuilder()
        .setName("name3373707")
        .setFilter("filter-1274492040")
        .setPageSize(883849137)
        .setPageToken("pageToken873572522")
        .build();
  }

  /** SetIamPolicy request fixture shared by the IAM tests. */
  private static SetIamPolicyRequest sampleSetIamPolicyRequest() {
    return SetIamPolicyRequest.newBuilder()
        .setResource(endpointResourceName())
        .setPolicy(Policy.newBuilder().build())
        .setUpdateMask(FieldMask.newBuilder().build())
        .build();
  }

  /** GetIamPolicy request fixture shared by the IAM tests. */
  private static GetIamPolicyRequest sampleGetIamPolicyRequest() {
    return GetIamPolicyRequest.newBuilder()
        .setResource(endpointResourceName())
        .setOptions(GetPolicyOptions.newBuilder().build())
        .build();
  }

  /** TestIamPermissions request fixture shared by the IAM tests. */
  private static TestIamPermissionsRequest sampleTestIamPermissionsRequest() {
    return TestIamPermissionsRequest.newBuilder()
        .setResource(endpointResourceName())
        .addAllPermissions(new ArrayList<String>())
        .build();
  }

  /** Asserts exactly one request was captured and returns it cast to {@code clazz}. */
  private static <T extends AbstractMessage> T singleRequest(
      List<AbstractMessage> actualRequests, Class<T> clazz) {
    Assert.assertEquals(1, actualRequests.size());
    return clazz.cast(actualRequests.get(0));
  }

  /** Asserts that the default API client header was sent on the local channel. */
  private void assertApiClientHeaderSent() {
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  // ---------------------------------------------------------------------------
  // createTuningJob
  // ---------------------------------------------------------------------------

  @Test
  public void createTuningJobTest() throws Exception {
    TuningJob expectedResponse = sampleTuningJob();
    mockGenAiTuningService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    TuningJob tuningJob = TuningJob.newBuilder().build();

    TuningJob actualResponse = client.createTuningJob(parent, tuningJob);
    Assert.assertEquals(expectedResponse, actualResponse);

    CreateTuningJobRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), CreateTuningJobRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(tuningJob, actualRequest.getTuningJob());
    assertApiClientHeaderSent();
  }

  @Test
  public void createTuningJobExceptionTest() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.createTuningJob(
          LocationName.of("[PROJECT]", "[LOCATION]"), TuningJob.newBuilder().build());
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void createTuningJobTest2() throws Exception {
    TuningJob expectedResponse = sampleTuningJob();
    mockGenAiTuningService.addResponse(expectedResponse);

    String parent = "parent-995424086";
    TuningJob tuningJob = TuningJob.newBuilder().build();

    TuningJob actualResponse = client.createTuningJob(parent, tuningJob);
    Assert.assertEquals(expectedResponse, actualResponse);

    CreateTuningJobRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), CreateTuningJobRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(tuningJob, actualRequest.getTuningJob());
    assertApiClientHeaderSent();
  }

  @Test
  public void createTuningJobExceptionTest2() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.createTuningJob("parent-995424086", TuningJob.newBuilder().build());
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------
  // getTuningJob
  // ---------------------------------------------------------------------------

  @Test
  public void getTuningJobTest() throws Exception {
    TuningJob expectedResponse = sampleTuningJob();
    mockGenAiTuningService.addResponse(expectedResponse);

    TuningJobName name = TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]");

    TuningJob actualResponse = client.getTuningJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    GetTuningJobRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), GetTuningJobRequest.class);
    Assert.assertEquals(name.toString(), actualRequest.getName());
    assertApiClientHeaderSent();
  }

  @Test
  public void getTuningJobExceptionTest() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.getTuningJob(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]"));
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getTuningJobTest2() throws Exception {
    TuningJob expectedResponse = sampleTuningJob();
    mockGenAiTuningService.addResponse(expectedResponse);

    String name = "name3373707";

    TuningJob actualResponse = client.getTuningJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    GetTuningJobRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), GetTuningJobRequest.class);
    Assert.assertEquals(name, actualRequest.getName());
    assertApiClientHeaderSent();
  }

  @Test
  public void getTuningJobExceptionTest2() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.getTuningJob("name3373707");
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------
  // listTuningJobs
  // ---------------------------------------------------------------------------

  @Test
  public void listTuningJobsTest() throws Exception {
    TuningJob responsesElement = TuningJob.newBuilder().build();
    ListTuningJobsResponse expectedResponse =
        ListTuningJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllTuningJobs(Arrays.asList(responsesElement))
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");

    ListTuningJobsPagedResponse pagedListResponse = client.listTuningJobs(parent);
    List<TuningJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getTuningJobsList().get(0), resources.get(0));

    ListTuningJobsRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), ListTuningJobsRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    assertApiClientHeaderSent();
  }

  @Test
  public void listTuningJobsExceptionTest() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.listTuningJobs(LocationName.of("[PROJECT]", "[LOCATION]"));
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listTuningJobsTest2() throws Exception {
    TuningJob responsesElement = TuningJob.newBuilder().build();
    ListTuningJobsResponse expectedResponse =
        ListTuningJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllTuningJobs(Arrays.asList(responsesElement))
            .build();
    mockGenAiTuningService.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListTuningJobsPagedResponse pagedListResponse = client.listTuningJobs(parent);
    List<TuningJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getTuningJobsList().get(0), resources.get(0));

    ListTuningJobsRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), ListTuningJobsRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    assertApiClientHeaderSent();
  }

  @Test
  public void listTuningJobsExceptionTest2() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.listTuningJobs("parent-995424086");
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------
  // cancelTuningJob
  // ---------------------------------------------------------------------------

  @Test
  public void cancelTuningJobTest() throws Exception {
    mockGenAiTuningService.addResponse(Empty.newBuilder().build());

    TuningJobName name = TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]");
    client.cancelTuningJob(name);

    CancelTuningJobRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), CancelTuningJobRequest.class);
    Assert.assertEquals(name.toString(), actualRequest.getName());
    assertApiClientHeaderSent();
  }

  @Test
  public void cancelTuningJobExceptionTest() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.cancelTuningJob(TuningJobName.of("[PROJECT]", "[LOCATION]", "[TUNING_JOB]"));
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void cancelTuningJobTest2() throws Exception {
    mockGenAiTuningService.addResponse(Empty.newBuilder().build());

    String name = "name3373707";
    client.cancelTuningJob(name);

    CancelTuningJobRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), CancelTuningJobRequest.class);
    Assert.assertEquals(name, actualRequest.getName());
    assertApiClientHeaderSent();
  }

  @Test
  public void cancelTuningJobExceptionTest2() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.cancelTuningJob("name3373707");
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------
  // rebaseTunedModel (long-running operation)
  // ---------------------------------------------------------------------------

  @Test
  public void rebaseTunedModelTest() throws Exception {
    TuningJob expectedResponse = sampleTuningJob();
    mockGenAiTuningService.addResponse(doneOperation("rebaseTunedModelTest", expectedResponse));

    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    TunedModelRef tunedModelRef = TunedModelRef.newBuilder().build();

    TuningJob actualResponse = client.rebaseTunedModelAsync(parent, tunedModelRef).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    RebaseTunedModelRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), RebaseTunedModelRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(tunedModelRef, actualRequest.getTunedModelRef());
    assertApiClientHeaderSent();
  }

  @Test
  public void rebaseTunedModelExceptionTest() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client
          .rebaseTunedModelAsync(
              LocationName.of("[PROJECT]", "[LOCATION]"), TunedModelRef.newBuilder().build())
          .get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  @Test
  public void rebaseTunedModelTest2() throws Exception {
    TuningJob expectedResponse = sampleTuningJob();
    mockGenAiTuningService.addResponse(doneOperation("rebaseTunedModelTest", expectedResponse));

    String parent = "parent-995424086";
    TunedModelRef tunedModelRef = TunedModelRef.newBuilder().build();

    TuningJob actualResponse = client.rebaseTunedModelAsync(parent, tunedModelRef).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    RebaseTunedModelRequest actualRequest =
        singleRequest(mockGenAiTuningService.getRequests(), RebaseTunedModelRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(tunedModelRef, actualRequest.getTunedModelRef());
    assertApiClientHeaderSent();
  }

  @Test
  public void rebaseTunedModelExceptionTest2() throws Exception {
    mockGenAiTuningService.addException(invalidArgumentException());
    try {
      client.rebaseTunedModelAsync("parent-995424086", TunedModelRef.newBuilder().build()).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }

  // ---------------------------------------------------------------------------
  // Locations mixin
  // ---------------------------------------------------------------------------

  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);

    ListLocationsRequest request = sampleListLocationsRequest();

    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));

    ListLocationsRequest actualRequest =
        singleRequest(mockLocations.getRequests(), ListLocationsRequest.class);
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    assertApiClientHeaderSent();
  }

  @Test
  public void listLocationsExceptionTest() throws Exception {
    mockLocations.addException(invalidArgumentException());
    try {
      client.listLocations(sampleListLocationsRequest());
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);

    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();

    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    GetLocationRequest actualRequest =
        singleRequest(mockLocations.getRequests(), GetLocationRequest.class);
    Assert.assertEquals(request.getName(), actualRequest.getName());
    assertApiClientHeaderSent();
  }

  @Test
  public void getLocationExceptionTest() throws Exception {
    mockLocations.addException(invalidArgumentException());
    try {
      client.getLocation(GetLocationRequest.newBuilder().setName("name3373707").build());
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------
  // IAM mixin
  // ---------------------------------------------------------------------------

  @Test
  public void setIamPolicyTest() throws Exception {
    Policy expectedResponse = samplePolicy();
    mockIAMPolicy.addResponse(expectedResponse);

    SetIamPolicyRequest request = sampleSetIamPolicyRequest();

    Policy actualResponse = client.setIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    SetIamPolicyRequest actualRequest =
        singleRequest(mockIAMPolicy.getRequests(), SetIamPolicyRequest.class);
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
    Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
    assertApiClientHeaderSent();
  }

  @Test
  public void setIamPolicyExceptionTest() throws Exception {
    mockIAMPolicy.addException(invalidArgumentException());
    try {
      client.setIamPolicy(sampleSetIamPolicyRequest());
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getIamPolicyTest() throws Exception {
    Policy expectedResponse = samplePolicy();
    mockIAMPolicy.addResponse(expectedResponse);

    GetIamPolicyRequest request = sampleGetIamPolicyRequest();

    Policy actualResponse = client.getIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    GetIamPolicyRequest actualRequest =
        singleRequest(mockIAMPolicy.getRequests(), GetIamPolicyRequest.class);
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
    assertApiClientHeaderSent();
  }

  @Test
  public void getIamPolicyExceptionTest() throws Exception {
    mockIAMPolicy.addException(invalidArgumentException());
    try {
      client.getIamPolicy(sampleGetIamPolicyRequest());
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void testIamPermissionsTest() throws Exception {
    TestIamPermissionsResponse expectedResponse =
        TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
    mockIAMPolicy.addResponse(expectedResponse);

    TestIamPermissionsRequest request = sampleTestIamPermissionsRequest();

    TestIamPermissionsResponse actualResponse = client.testIamPermissions(request);
    Assert.assertEquals(expectedResponse, actualResponse);

    TestIamPermissionsRequest actualRequest =
        singleRequest(mockIAMPolicy.getRequests(), TestIamPermissionsRequest.class);
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList());
    assertApiClientHeaderSent();
  }

  @Test
  public void testIamPermissionsExceptionTest() throws Exception {
    mockIAMPolicy.addException(invalidArgumentException());
    try {
      client.testIamPermissions(sampleTestIamPermissionsRequest());
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
|
googleapis/google-cloud-java | 36,228 | java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/CreateControlRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/control_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Request for CreateControl method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.CreateControlRequest}
*/
public final class CreateControlRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.CreateControlRequest)
CreateControlRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateControlRequest.newBuilder() to construct.
  private CreateControlRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    // Delegates all field state handling to the generated message base class.
    super(builder);
  }
  private CreateControlRequest() {
    // Initialize string fields to empty so accessors never return null.
    parent_ = "";
    controlId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to create fresh instances.
    return new CreateControlRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.ControlServiceProto
.internal_static_google_cloud_retail_v2_CreateControlRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.ControlServiceProto
.internal_static_google_cloud_retail_v2_CreateControlRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.CreateControlRequest.class,
com.google.cloud.retail.v2.CreateControlRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Full resource name of parent catalog. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent catalog. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONTROL_FIELD_NUMBER = 2;
private com.google.cloud.retail.v2.Control control_;
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the control field is set.
*/
@java.lang.Override
public boolean hasControl() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The control.
*/
@java.lang.Override
public com.google.cloud.retail.v2.Control getControl() {
return control_ == null ? com.google.cloud.retail.v2.Control.getDefaultInstance() : control_;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.retail.v2.ControlOrBuilder getControlOrBuilder() {
return control_ == null ? com.google.cloud.retail.v2.Control.getDefaultInstance() : control_;
}
public static final int CONTROL_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object controlId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The controlId.
*/
@java.lang.Override
public java.lang.String getControlId() {
java.lang.Object ref = controlId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
controlId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for controlId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getControlIdBytes() {
java.lang.Object ref = controlId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
controlId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getControl());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(controlId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, controlId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getControl());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(controlId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, controlId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2.CreateControlRequest)) {
return super.equals(obj);
}
com.google.cloud.retail.v2.CreateControlRequest other =
(com.google.cloud.retail.v2.CreateControlRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasControl() != other.hasControl()) return false;
if (hasControl()) {
if (!getControl().equals(other.getControl())) return false;
}
if (!getControlId().equals(other.getControlId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasControl()) {
hash = (37 * hash) + CONTROL_FIELD_NUMBER;
hash = (53 * hash) + getControl().hashCode();
}
hash = (37 * hash) + CONTROL_ID_FIELD_NUMBER;
hash = (53 * hash) + getControlId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.retail.v2.CreateControlRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.retail.v2.CreateControlRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for CreateControl method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.CreateControlRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.CreateControlRequest)
com.google.cloud.retail.v2.CreateControlRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2.ControlServiceProto
.internal_static_google_cloud_retail_v2_CreateControlRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2.ControlServiceProto
.internal_static_google_cloud_retail_v2_CreateControlRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2.CreateControlRequest.class,
com.google.cloud.retail.v2.CreateControlRequest.Builder.class);
}
// Construct using com.google.cloud.retail.v2.CreateControlRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getControlFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
control_ = null;
if (controlBuilder_ != null) {
controlBuilder_.dispose();
controlBuilder_ = null;
}
controlId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2.ControlServiceProto
.internal_static_google_cloud_retail_v2_CreateControlRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.retail.v2.CreateControlRequest getDefaultInstanceForType() {
return com.google.cloud.retail.v2.CreateControlRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.retail.v2.CreateControlRequest build() {
com.google.cloud.retail.v2.CreateControlRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.retail.v2.CreateControlRequest buildPartial() {
com.google.cloud.retail.v2.CreateControlRequest result =
new com.google.cloud.retail.v2.CreateControlRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.retail.v2.CreateControlRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.control_ = controlBuilder_ == null ? control_ : controlBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.controlId_ = controlId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2.CreateControlRequest) {
return mergeFrom((com.google.cloud.retail.v2.CreateControlRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.retail.v2.CreateControlRequest other) {
if (other == com.google.cloud.retail.v2.CreateControlRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasControl()) {
mergeControl(other.getControl());
}
if (!other.getControlId().isEmpty()) {
controlId_ = other.controlId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getControlFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
controlId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Full resource name of parent catalog. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent catalog. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Full resource name of parent catalog. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Full resource name of parent catalog. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Full resource name of parent catalog. Format:
* `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.retail.v2.Control control_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2.Control,
com.google.cloud.retail.v2.Control.Builder,
com.google.cloud.retail.v2.ControlOrBuilder>
controlBuilder_;
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the control field is set.
*/
public boolean hasControl() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The control.
*/
public com.google.cloud.retail.v2.Control getControl() {
if (controlBuilder_ == null) {
return control_ == null
? com.google.cloud.retail.v2.Control.getDefaultInstance()
: control_;
} else {
return controlBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setControl(com.google.cloud.retail.v2.Control value) {
if (controlBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
control_ = value;
} else {
controlBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setControl(com.google.cloud.retail.v2.Control.Builder builderForValue) {
if (controlBuilder_ == null) {
control_ = builderForValue.build();
} else {
controlBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeControl(com.google.cloud.retail.v2.Control value) {
if (controlBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& control_ != null
&& control_ != com.google.cloud.retail.v2.Control.getDefaultInstance()) {
getControlBuilder().mergeFrom(value);
} else {
control_ = value;
}
} else {
controlBuilder_.mergeFrom(value);
}
if (control_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearControl() {
bitField0_ = (bitField0_ & ~0x00000002);
control_ = null;
if (controlBuilder_ != null) {
controlBuilder_.dispose();
controlBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2.Control.Builder getControlBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getControlFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2.ControlOrBuilder getControlOrBuilder() {
if (controlBuilder_ != null) {
return controlBuilder_.getMessageOrBuilder();
} else {
return control_ == null
? com.google.cloud.retail.v2.Control.getDefaultInstance()
: control_;
}
}
/**
*
*
* <pre>
* Required. The Control to create.
* </pre>
*
* <code>.google.cloud.retail.v2.Control control = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2.Control,
com.google.cloud.retail.v2.Control.Builder,
com.google.cloud.retail.v2.ControlOrBuilder>
getControlFieldBuilder() {
if (controlBuilder_ == null) {
controlBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2.Control,
com.google.cloud.retail.v2.Control.Builder,
com.google.cloud.retail.v2.ControlOrBuilder>(
getControl(), getParentForChildren(), isClean());
control_ = null;
}
return controlBuilder_;
}
private java.lang.Object controlId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The controlId.
*/
public java.lang.String getControlId() {
java.lang.Object ref = controlId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
controlId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for controlId.
*/
public com.google.protobuf.ByteString getControlIdBytes() {
java.lang.Object ref = controlId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
controlId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The controlId to set.
* @return This builder for chaining.
*/
public Builder setControlId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
controlId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearControlId() {
controlId_ = getDefaultInstance().getControlId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the Control, which will become the final
* component of the Control's resource name.
*
* This value should be 4-63 characters, and valid characters
* are /[a-z][0-9]-_/.
* </pre>
*
* <code>string control_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for controlId to set.
* @return This builder for chaining.
*/
public Builder setControlIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
controlId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.CreateControlRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.CreateControlRequest)
private static final com.google.cloud.retail.v2.CreateControlRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.retail.v2.CreateControlRequest();
}
public static com.google.cloud.retail.v2.CreateControlRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateControlRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateControlRequest>() {
@java.lang.Override
public CreateControlRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateControlRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateControlRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.retail.v2.CreateControlRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,214 | java-bare-metal-solution/proto-google-cloud-bare-metal-solution-v2/src/main/java/com/google/cloud/baremetalsolution/v2/ListSSHKeysResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/baremetalsolution/v2/ssh_key.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.baremetalsolution.v2;
/**
*
*
* <pre>
* Message for response of ListSSHKeys.
* </pre>
*
* Protobuf type {@code google.cloud.baremetalsolution.v2.ListSSHKeysResponse}
*/
public final class ListSSHKeysResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.baremetalsolution.v2.ListSSHKeysResponse)
ListSSHKeysResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSSHKeysResponse.newBuilder() to construct.
private ListSSHKeysResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListSSHKeysResponse() {
sshKeys_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListSSHKeysResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.baremetalsolution.v2.SshKeyProto
.internal_static_google_cloud_baremetalsolution_v2_ListSSHKeysResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.baremetalsolution.v2.SshKeyProto
.internal_static_google_cloud_baremetalsolution_v2_ListSSHKeysResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse.class,
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse.Builder.class);
}
public static final int SSH_KEYS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.baremetalsolution.v2.SSHKey> sshKeys_;
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.baremetalsolution.v2.SSHKey> getSshKeysList() {
return sshKeys_;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.baremetalsolution.v2.SSHKeyOrBuilder>
getSshKeysOrBuilderList() {
return sshKeys_;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
@java.lang.Override
public int getSshKeysCount() {
return sshKeys_.size();
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
@java.lang.Override
public com.google.cloud.baremetalsolution.v2.SSHKey getSshKeys(int index) {
return sshKeys_.get(index);
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
@java.lang.Override
public com.google.cloud.baremetalsolution.v2.SSHKeyOrBuilder getSshKeysOrBuilder(int index) {
return sshKeys_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 90;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 90;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 90;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < sshKeys_.size(); i++) {
output.writeMessage(1, sshKeys_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 90, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < sshKeys_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sshKeys_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(90, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse)) {
return super.equals(obj);
}
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse other =
(com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse) obj;
if (!getSshKeysList().equals(other.getSshKeysList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSshKeysCount() > 0) {
hash = (37 * hash) + SSH_KEYS_FIELD_NUMBER;
hash = (53 * hash) + getSshKeysList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for response of ListSSHKeys.
* </pre>
*
* Protobuf type {@code google.cloud.baremetalsolution.v2.ListSSHKeysResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.baremetalsolution.v2.ListSSHKeysResponse)
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.baremetalsolution.v2.SshKeyProto
.internal_static_google_cloud_baremetalsolution_v2_ListSSHKeysResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.baremetalsolution.v2.SshKeyProto
.internal_static_google_cloud_baremetalsolution_v2_ListSSHKeysResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse.class,
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse.Builder.class);
}
// Construct using com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (sshKeysBuilder_ == null) {
sshKeys_ = java.util.Collections.emptyList();
} else {
sshKeys_ = null;
sshKeysBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.baremetalsolution.v2.SshKeyProto
.internal_static_google_cloud_baremetalsolution_v2_ListSSHKeysResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse getDefaultInstanceForType() {
return com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse build() {
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse buildPartial() {
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse result =
new com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse result) {
if (sshKeysBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
sshKeys_ = java.util.Collections.unmodifiableList(sshKeys_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.sshKeys_ = sshKeys_;
} else {
result.sshKeys_ = sshKeysBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse) {
return mergeFrom((com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse other) {
if (other == com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse.getDefaultInstance())
return this;
if (sshKeysBuilder_ == null) {
if (!other.sshKeys_.isEmpty()) {
if (sshKeys_.isEmpty()) {
sshKeys_ = other.sshKeys_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSshKeysIsMutable();
sshKeys_.addAll(other.sshKeys_);
}
onChanged();
}
} else {
if (!other.sshKeys_.isEmpty()) {
if (sshKeysBuilder_.isEmpty()) {
sshKeysBuilder_.dispose();
sshKeysBuilder_ = null;
sshKeys_ = other.sshKeys_;
bitField0_ = (bitField0_ & ~0x00000001);
sshKeysBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSshKeysFieldBuilder()
: null;
} else {
sshKeysBuilder_.addAllMessages(other.sshKeys_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.baremetalsolution.v2.SSHKey m =
input.readMessage(
com.google.cloud.baremetalsolution.v2.SSHKey.parser(), extensionRegistry);
if (sshKeysBuilder_ == null) {
ensureSshKeysIsMutable();
sshKeys_.add(m);
} else {
sshKeysBuilder_.addMessage(m);
}
break;
} // case 10
case 722:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 722
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.baremetalsolution.v2.SSHKey> sshKeys_ =
java.util.Collections.emptyList();
private void ensureSshKeysIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
sshKeys_ = new java.util.ArrayList<com.google.cloud.baremetalsolution.v2.SSHKey>(sshKeys_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.baremetalsolution.v2.SSHKey,
com.google.cloud.baremetalsolution.v2.SSHKey.Builder,
com.google.cloud.baremetalsolution.v2.SSHKeyOrBuilder>
sshKeysBuilder_;
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public java.util.List<com.google.cloud.baremetalsolution.v2.SSHKey> getSshKeysList() {
if (sshKeysBuilder_ == null) {
return java.util.Collections.unmodifiableList(sshKeys_);
} else {
return sshKeysBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public int getSshKeysCount() {
if (sshKeysBuilder_ == null) {
return sshKeys_.size();
} else {
return sshKeysBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public com.google.cloud.baremetalsolution.v2.SSHKey getSshKeys(int index) {
if (sshKeysBuilder_ == null) {
return sshKeys_.get(index);
} else {
return sshKeysBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder setSshKeys(int index, com.google.cloud.baremetalsolution.v2.SSHKey value) {
if (sshKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSshKeysIsMutable();
sshKeys_.set(index, value);
onChanged();
} else {
sshKeysBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder setSshKeys(
int index, com.google.cloud.baremetalsolution.v2.SSHKey.Builder builderForValue) {
if (sshKeysBuilder_ == null) {
ensureSshKeysIsMutable();
sshKeys_.set(index, builderForValue.build());
onChanged();
} else {
sshKeysBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder addSshKeys(com.google.cloud.baremetalsolution.v2.SSHKey value) {
if (sshKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSshKeysIsMutable();
sshKeys_.add(value);
onChanged();
} else {
sshKeysBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder addSshKeys(int index, com.google.cloud.baremetalsolution.v2.SSHKey value) {
if (sshKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSshKeysIsMutable();
sshKeys_.add(index, value);
onChanged();
} else {
sshKeysBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder addSshKeys(
com.google.cloud.baremetalsolution.v2.SSHKey.Builder builderForValue) {
if (sshKeysBuilder_ == null) {
ensureSshKeysIsMutable();
sshKeys_.add(builderForValue.build());
onChanged();
} else {
sshKeysBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder addSshKeys(
int index, com.google.cloud.baremetalsolution.v2.SSHKey.Builder builderForValue) {
if (sshKeysBuilder_ == null) {
ensureSshKeysIsMutable();
sshKeys_.add(index, builderForValue.build());
onChanged();
} else {
sshKeysBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder addAllSshKeys(
java.lang.Iterable<? extends com.google.cloud.baremetalsolution.v2.SSHKey> values) {
if (sshKeysBuilder_ == null) {
ensureSshKeysIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sshKeys_);
onChanged();
} else {
sshKeysBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder clearSshKeys() {
if (sshKeysBuilder_ == null) {
sshKeys_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
sshKeysBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public Builder removeSshKeys(int index) {
if (sshKeysBuilder_ == null) {
ensureSshKeysIsMutable();
sshKeys_.remove(index);
onChanged();
} else {
sshKeysBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public com.google.cloud.baremetalsolution.v2.SSHKey.Builder getSshKeysBuilder(int index) {
return getSshKeysFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public com.google.cloud.baremetalsolution.v2.SSHKeyOrBuilder getSshKeysOrBuilder(int index) {
if (sshKeysBuilder_ == null) {
return sshKeys_.get(index);
} else {
return sshKeysBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public java.util.List<? extends com.google.cloud.baremetalsolution.v2.SSHKeyOrBuilder>
getSshKeysOrBuilderList() {
if (sshKeysBuilder_ != null) {
return sshKeysBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sshKeys_);
}
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public com.google.cloud.baremetalsolution.v2.SSHKey.Builder addSshKeysBuilder() {
return getSshKeysFieldBuilder()
.addBuilder(com.google.cloud.baremetalsolution.v2.SSHKey.getDefaultInstance());
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public com.google.cloud.baremetalsolution.v2.SSHKey.Builder addSshKeysBuilder(int index) {
return getSshKeysFieldBuilder()
.addBuilder(index, com.google.cloud.baremetalsolution.v2.SSHKey.getDefaultInstance());
}
/**
*
*
* <pre>
* The SSH keys registered in the project.
* </pre>
*
* <code>repeated .google.cloud.baremetalsolution.v2.SSHKey ssh_keys = 1;</code>
*/
public java.util.List<com.google.cloud.baremetalsolution.v2.SSHKey.Builder>
getSshKeysBuilderList() {
return getSshKeysFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.baremetalsolution.v2.SSHKey,
com.google.cloud.baremetalsolution.v2.SSHKey.Builder,
com.google.cloud.baremetalsolution.v2.SSHKeyOrBuilder>
getSshKeysFieldBuilder() {
if (sshKeysBuilder_ == null) {
sshKeysBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.baremetalsolution.v2.SSHKey,
com.google.cloud.baremetalsolution.v2.SSHKey.Builder,
com.google.cloud.baremetalsolution.v2.SSHKeyOrBuilder>(
sshKeys_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
sshKeys_ = null;
}
return sshKeysBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 90;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 90;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 90;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 90;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 90;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.baremetalsolution.v2.ListSSHKeysResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.baremetalsolution.v2.ListSSHKeysResponse)
private static final com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse();
}
public static com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListSSHKeysResponse> PARSER =
new com.google.protobuf.AbstractParser<ListSSHKeysResponse>() {
@java.lang.Override
public ListSSHKeysResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListSSHKeysResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListSSHKeysResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.baremetalsolution.v2.ListSSHKeysResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,285 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/UpdateTestCaseRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/test_case.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* The request message for
* [TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3beta1.TestCases.UpdateTestCase].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest}
*/
public final class UpdateTestCaseRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest)
UpdateTestCaseRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateTestCaseRequest.newBuilder() to construct.
private UpdateTestCaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateTestCaseRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateTestCaseRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest.Builder.class);
}
private int bitField0_;
public static final int TEST_CASE_FIELD_NUMBER = 1;
private com.google.cloud.dialogflow.cx.v3beta1.TestCase testCase_;
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the testCase field is set.
*/
@java.lang.Override
public boolean hasTestCase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The testCase.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.TestCase getTestCase() {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3beta1.TestCase.getDefaultInstance()
: testCase_;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.TestCaseOrBuilder getTestCaseOrBuilder() {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3beta1.TestCase.getDefaultInstance()
: testCase_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTestCase());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTestCase());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest other =
(com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest) obj;
if (hasTestCase() != other.hasTestCase()) return false;
if (hasTestCase()) {
if (!getTestCase().equals(other.getTestCase())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTestCase()) {
hash = (37 * hash) + TEST_CASE_FIELD_NUMBER;
hash = (53 * hash) + getTestCase().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [TestCases.UpdateTestCase][google.cloud.dialogflow.cx.v3beta1.TestCases.UpdateTestCase].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest)
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTestCaseFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
testCase_ = null;
if (testCaseBuilder_ != null) {
testCaseBuilder_.dispose();
testCaseBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3beta1.TestCaseProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateTestCaseRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest build() {
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest buildPartial() {
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest result =
new com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.testCase_ = testCaseBuilder_ == null ? testCase_ : testCaseBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest) {
return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest other) {
if (other
== com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest.getDefaultInstance())
return this;
if (other.hasTestCase()) {
mergeTestCase(other.getTestCase());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getTestCaseFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.dialogflow.cx.v3beta1.TestCase testCase_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.TestCase,
com.google.cloud.dialogflow.cx.v3beta1.TestCase.Builder,
com.google.cloud.dialogflow.cx.v3beta1.TestCaseOrBuilder>
testCaseBuilder_;
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the testCase field is set.
*/
public boolean hasTestCase() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The testCase.
*/
public com.google.cloud.dialogflow.cx.v3beta1.TestCase getTestCase() {
if (testCaseBuilder_ == null) {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3beta1.TestCase.getDefaultInstance()
: testCase_;
} else {
return testCaseBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestCase(com.google.cloud.dialogflow.cx.v3beta1.TestCase value) {
if (testCaseBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
testCase_ = value;
} else {
testCaseBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTestCase(
com.google.cloud.dialogflow.cx.v3beta1.TestCase.Builder builderForValue) {
if (testCaseBuilder_ == null) {
testCase_ = builderForValue.build();
} else {
testCaseBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTestCase(com.google.cloud.dialogflow.cx.v3beta1.TestCase value) {
if (testCaseBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& testCase_ != null
&& testCase_ != com.google.cloud.dialogflow.cx.v3beta1.TestCase.getDefaultInstance()) {
getTestCaseBuilder().mergeFrom(value);
} else {
testCase_ = value;
}
} else {
testCaseBuilder_.mergeFrom(value);
}
if (testCase_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTestCase() {
bitField0_ = (bitField0_ & ~0x00000001);
testCase_ = null;
if (testCaseBuilder_ != null) {
testCaseBuilder_.dispose();
testCaseBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.cx.v3beta1.TestCase.Builder getTestCaseBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTestCaseFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.cx.v3beta1.TestCaseOrBuilder getTestCaseOrBuilder() {
if (testCaseBuilder_ != null) {
return testCaseBuilder_.getMessageOrBuilder();
} else {
return testCase_ == null
? com.google.cloud.dialogflow.cx.v3beta1.TestCase.getDefaultInstance()
: testCase_;
}
}
/**
*
*
* <pre>
* Required. The test case to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.TestCase test_case = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.TestCase,
com.google.cloud.dialogflow.cx.v3beta1.TestCase.Builder,
com.google.cloud.dialogflow.cx.v3beta1.TestCaseOrBuilder>
getTestCaseFieldBuilder() {
if (testCaseBuilder_ == null) {
testCaseBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.TestCase,
com.google.cloud.dialogflow.cx.v3beta1.TestCase.Builder,
com.google.cloud.dialogflow.cx.v3beta1.TestCaseOrBuilder>(
getTestCase(), getParentForChildren(), isClean());
testCase_ = null;
}
return testCaseBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The mask to specify which fields should be updated. The
* [`creationTime`][google.cloud.dialogflow.cx.v3beta1.TestCase.creation_time]
* and
* [`lastTestResult`][google.cloud.dialogflow.cx.v3beta1.TestCase.last_test_result]
* cannot be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest)
private static final com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest();
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateTestCaseRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateTestCaseRequest>() {
@java.lang.Override
public UpdateTestCaseRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateTestCaseRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateTestCaseRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateTestCaseRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,400 | java-tasks/proto-google-cloud-tasks-v2beta2/src/main/java/com/google/cloud/tasks/v2beta2/RateLimits.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/tasks/v2beta2/queue.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.tasks.v2beta2;
/**
*
*
* <pre>
* Rate limits.
*
* This message determines the maximum rate that tasks can be dispatched by a
* queue, regardless of whether the dispatch is a first task attempt or a retry.
*
* Note: The debugging command,
* [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a task
* even if the queue has reached its
* [RateLimits][google.cloud.tasks.v2beta2.RateLimits].
* </pre>
*
* Protobuf type {@code google.cloud.tasks.v2beta2.RateLimits}
*/
public final class RateLimits extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.tasks.v2beta2.RateLimits)
RateLimitsOrBuilder {
private static final long serialVersionUID = 0L;
// Use RateLimits.newBuilder() to construct.
private RateLimits(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RateLimits() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RateLimits();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.tasks.v2beta2.QueueProto
.internal_static_google_cloud_tasks_v2beta2_RateLimits_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.tasks.v2beta2.QueueProto
.internal_static_google_cloud_tasks_v2beta2_RateLimits_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.tasks.v2beta2.RateLimits.class,
com.google.cloud.tasks.v2beta2.RateLimits.Builder.class);
}
public static final int MAX_TASKS_DISPATCHED_PER_SECOND_FIELD_NUMBER = 1;
private double maxTasksDispatchedPerSecond_ = 0D;
/**
*
*
* <pre>
* The maximum rate at which tasks are dispatched from this queue.
*
* If unspecified when the queue is created, Cloud Tasks will pick the
* default.
*
* * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget],
* the maximum allowed value
* is 500.
* * This field is output only for [pull
* queues][google.cloud.tasks.v2beta2.PullTarget]. In addition to the
* `max_tasks_dispatched_per_second` limit, a maximum of 10 QPS of
* [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] requests
* are allowed per pull queue.
*
*
* This field has the same meaning as
* [rate in
* queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#rate).
* </pre>
*
* <code>double max_tasks_dispatched_per_second = 1;</code>
*
* @return The maxTasksDispatchedPerSecond.
*/
@java.lang.Override
public double getMaxTasksDispatchedPerSecond() {
return maxTasksDispatchedPerSecond_;
}
  // Wire-format field number for max_burst_size (used in writeTo/getSerializedSize/hashCode).
  public static final int MAX_BURST_SIZE_FIELD_NUMBER = 2;
  // Backing store; proto3 int32 default is 0 (indistinguishable from "unset" on the wire).
  private int maxBurstSize_ = 0;
  /**
   *
   *
   * <pre>
   * The max burst size.
   *
   * Max burst size limits how fast tasks in queue are processed when
   * many tasks are in the queue and the rate is high. This field
   * allows the queue to have a high rate so processing starts shortly
   * after a task is enqueued, but still limits resource usage when
   * many tasks are enqueued in a short period of time.
   *
   * The [token bucket](https://wikipedia.org/wiki/Token_Bucket)
   * algorithm is used to control the rate of task dispatches. Each
   * queue has a token bucket that holds tokens, up to the maximum
   * specified by `max_burst_size`. Each time a task is dispatched, a
   * token is removed from the bucket. Tasks will be dispatched until
   * the queue's bucket runs out of tokens. The bucket will be
   * continuously refilled with new tokens based on
   * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
   *
   * The default value of `max_burst_size` is picked by Cloud Tasks
   * based on the value of
   * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
   *
   * The maximum value of `max_burst_size` is 500.
   *
   * For App Engine queues that were created or updated using
   * `queue.yaml/xml`, `max_burst_size` is equal to
   * [bucket_size](https://cloud.google.com/appengine/docs/standard/python/config/queueref#bucket_size).
   * If
   * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is called
   * on a queue without explicitly setting a value for `max_burst_size`,
   * `max_burst_size` value will get updated if
   * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is
   * updating [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
   * </pre>
   *
   * <code>int32 max_burst_size = 2;</code>
   *
   * @return The maxBurstSize.
   */
  @java.lang.Override
  public int getMaxBurstSize() {
    return maxBurstSize_;
  }
  // Wire-format field number for max_concurrent_tasks.
  public static final int MAX_CONCURRENT_TASKS_FIELD_NUMBER = 3;
  // Backing store; proto3 int32 default is 0.
  private int maxConcurrentTasks_ = 0;
  /**
   *
   *
   * <pre>
   * The maximum number of concurrent tasks that Cloud Tasks allows
   * to be dispatched for this queue. After this threshold has been
   * reached, Cloud Tasks stops dispatching tasks until the number of
   * concurrent requests decreases.
   *
   * If unspecified when the queue is created, Cloud Tasks will pick the
   * default.
   *
   *
   * The maximum allowed value is 5,000.
   *
   * This field is output only for
   * [pull queues][google.cloud.tasks.v2beta2.PullTarget] and always -1, which
   * indicates no limit. No other queue types can have `max_concurrent_tasks`
   * set to -1.
   *
   *
   * This field has the same meaning as
   * [max_concurrent_requests in
   * queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#max_concurrent_requests).
   * </pre>
   *
   * <code>int32 max_concurrent_tasks = 3;</code>
   *
   * @return The maxConcurrentTasks.
   */
  @java.lang.Override
  public int getMaxConcurrentTasks() {
    return maxConcurrentTasks_;
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is unconditionally initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes the message. Per proto3 semantics, fields at their default value
  // (0 / 0.0) are omitted from the wire entirely.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Raw-bits comparison distinguishes -0.0 from 0.0, matching protobuf's notion of "default".
    if (java.lang.Double.doubleToRawLongBits(maxTasksDispatchedPerSecond_) != 0) {
      output.writeDouble(1, maxTasksDispatchedPerSecond_);
    }
    if (maxBurstSize_ != 0) {
      output.writeInt32(2, maxBurstSize_);
    }
    if (maxConcurrentTasks_ != 0) {
      output.writeInt32(3, maxConcurrentTasks_);
    }
    // Preserve any fields this generated class does not know about (forward compatibility).
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize, inherited from the base class) the exact
  // serialized byte length; must mirror writeTo's default-skipping logic.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (java.lang.Double.doubleToRawLongBits(maxTasksDispatchedPerSecond_) != 0) {
      size +=
          com.google.protobuf.CodedOutputStream.computeDoubleSize(1, maxTasksDispatchedPerSecond_);
    }
    if (maxBurstSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, maxBurstSize_);
    }
    if (maxConcurrentTasks_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, maxConcurrentTasks_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all three fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.tasks.v2beta2.RateLimits)) {
      return super.equals(obj);
    }
    com.google.cloud.tasks.v2beta2.RateLimits other =
        (com.google.cloud.tasks.v2beta2.RateLimits) obj;
    // Bit-wise double comparison (doubleToLongBits) so NaN == NaN and -0.0 != 0.0,
    // keeping equals consistent with hashCode.
    if (java.lang.Double.doubleToLongBits(getMaxTasksDispatchedPerSecond())
        != java.lang.Double.doubleToLongBits(other.getMaxTasksDispatchedPerSecond())) return false;
    if (getMaxBurstSize() != other.getMaxBurstSize()) return false;
    if (getMaxConcurrentTasks() != other.getMaxConcurrentTasks()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor, each field (tagged by its field number), and unknown fields;
  // memoized in memoizedHashCode (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + MAX_TASKS_DISPATCHED_PER_SECOND_FIELD_NUMBER;
    hash =
        (53 * hash)
            + com.google.protobuf.Internal.hashLong(
                java.lang.Double.doubleToLongBits(getMaxTasksDispatchedPerSecond()));
    hash = (37 * hash) + MAX_BURST_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getMaxBurstSize();
    hash = (37 * hash) + MAX_CONCURRENT_TASKS_FIELD_NUMBER;
    hash = (53 * hash) + getMaxConcurrentTasks();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard protobuf parse entry points. The byte-array/ByteBuffer/ByteString overloads
  // delegate to PARSER directly; the stream overloads go through GeneratedMessageV3 helpers
  // that translate InvalidProtocolBufferException into its wrapped IOException form.
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message (for streamed sequences).
  public static com.google.cloud.tasks.v2beta2.RateLimits parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the given message's non-default fields.
  public static Builder newBuilder(com.google.cloud.tasks.v2beta2.RateLimits prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance has nothing to merge, so skip the mergeFrom in that case.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Rate limits.
   *
   * This message determines the maximum rate that tasks can be dispatched by a
   * queue, regardless of whether the dispatch is a first task attempt or a retry.
   *
   * Note: The debugging command,
   * [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a task
   * even if the queue has reached its
   * [RateLimits][google.cloud.tasks.v2beta2.RateLimits].
   * </pre>
   *
   * Protobuf type {@code google.cloud.tasks.v2beta2.RateLimits}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.tasks.v2beta2.RateLimits)
      com.google.cloud.tasks.v2beta2.RateLimitsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.tasks.v2beta2.QueueProto
          .internal_static_google_cloud_tasks_v2beta2_RateLimits_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.tasks.v2beta2.QueueProto
          .internal_static_google_cloud_tasks_v2beta2_RateLimits_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.tasks.v2beta2.RateLimits.class,
              com.google.cloud.tasks.v2beta2.RateLimits.Builder.class);
    }
    // Construct using com.google.cloud.tasks.v2beta2.RateLimits.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field to its proto3 default and clears the explicit-set bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      maxTasksDispatchedPerSecond_ = 0D;
      maxBurstSize_ = 0;
      maxConcurrentTasks_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.tasks.v2beta2.QueueProto
          .internal_static_google_cloud_tasks_v2beta2_RateLimits_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.tasks.v2beta2.RateLimits getDefaultInstanceForType() {
      return com.google.cloud.tasks.v2beta2.RateLimits.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.tasks.v2beta2.RateLimits build() {
      com.google.cloud.tasks.v2beta2.RateLimits result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.tasks.v2beta2.RateLimits buildPartial() {
      com.google.cloud.tasks.v2beta2.RateLimits result =
          new com.google.cloud.tasks.v2beta2.RateLimits(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose explicit-set bit is on into the freshly built message.
    private void buildPartial0(com.google.cloud.tasks.v2beta2.RateLimits result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.maxTasksDispatchedPerSecond_ = maxTasksDispatchedPerSecond_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.maxBurstSize_ = maxBurstSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.maxConcurrentTasks_ = maxConcurrentTasks_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.tasks.v2beta2.RateLimits) {
        return mergeFrom((com.google.cloud.tasks.v2beta2.RateLimits) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 scalar merge semantics: only non-default values from `other` overwrite this builder.
    public Builder mergeFrom(com.google.cloud.tasks.v2beta2.RateLimits other) {
      if (other == com.google.cloud.tasks.v2beta2.RateLimits.getDefaultInstance()) return this;
      if (other.getMaxTasksDispatchedPerSecond() != 0D) {
        setMaxTasksDispatchedPerSecond(other.getMaxTasksDispatchedPerSecond());
      }
      if (other.getMaxBurstSize() != 0) {
        setMaxBurstSize(other.getMaxBurstSize());
      }
      if (other.getMaxConcurrentTasks() != 0) {
        setMaxConcurrentTasks(other.getMaxConcurrentTasks());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Reads fields tag-by-tag from the wire; unrecognized tags are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 9: // field 1, wire type 1 (fixed64 double)
              {
                maxTasksDispatchedPerSecond_ = input.readDouble();
                bitField0_ |= 0x00000001;
                break;
              } // case 9
            case 16: // field 2, wire type 0 (varint)
              {
                maxBurstSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 24: // field 3, wire type 0 (varint)
              {
                maxConcurrentTasks_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // One bit per field, tracking which fields have been explicitly set on this builder.
    private int bitField0_;
    private double maxTasksDispatchedPerSecond_;
    /**
     *
     *
     * <pre>
     * The maximum rate at which tasks are dispatched from this queue.
     *
     * If unspecified when the queue is created, Cloud Tasks will pick the
     * default.
     *
     * * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget],
     * the maximum allowed value
     *   is 500.
     * * This field is output only   for [pull
     * queues][google.cloud.tasks.v2beta2.PullTarget]. In addition to the
     *   `max_tasks_dispatched_per_second` limit, a maximum of 10 QPS of
     *   [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] requests
     *   are allowed per pull queue.
     *
     *
     * This field has the same meaning as
     * [rate in
     * queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#rate).
     * </pre>
     *
     * <code>double max_tasks_dispatched_per_second = 1;</code>
     *
     * @return The maxTasksDispatchedPerSecond.
     */
    @java.lang.Override
    public double getMaxTasksDispatchedPerSecond() {
      return maxTasksDispatchedPerSecond_;
    }
    /**
     *
     *
     * <pre>
     * The maximum rate at which tasks are dispatched from this queue.
     *
     * If unspecified when the queue is created, Cloud Tasks will pick the
     * default.
     *
     * * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget],
     * the maximum allowed value
     *   is 500.
     * * This field is output only   for [pull
     * queues][google.cloud.tasks.v2beta2.PullTarget]. In addition to the
     *   `max_tasks_dispatched_per_second` limit, a maximum of 10 QPS of
     *   [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] requests
     *   are allowed per pull queue.
     *
     *
     * This field has the same meaning as
     * [rate in
     * queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#rate).
     * </pre>
     *
     * <code>double max_tasks_dispatched_per_second = 1;</code>
     *
     * @param value The maxTasksDispatchedPerSecond to set.
     * @return This builder for chaining.
     */
    public Builder setMaxTasksDispatchedPerSecond(double value) {
      maxTasksDispatchedPerSecond_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum rate at which tasks are dispatched from this queue.
     *
     * If unspecified when the queue is created, Cloud Tasks will pick the
     * default.
     *
     * * For [App Engine queues][google.cloud.tasks.v2beta2.AppEngineHttpTarget],
     * the maximum allowed value
     *   is 500.
     * * This field is output only   for [pull
     * queues][google.cloud.tasks.v2beta2.PullTarget]. In addition to the
     *   `max_tasks_dispatched_per_second` limit, a maximum of 10 QPS of
     *   [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] requests
     *   are allowed per pull queue.
     *
     *
     * This field has the same meaning as
     * [rate in
     * queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#rate).
     * </pre>
     *
     * <code>double max_tasks_dispatched_per_second = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMaxTasksDispatchedPerSecond() {
      bitField0_ = (bitField0_ & ~0x00000001);
      maxTasksDispatchedPerSecond_ = 0D;
      onChanged();
      return this;
    }
    private int maxBurstSize_;
    /**
     *
     *
     * <pre>
     * The max burst size.
     *
     * Max burst size limits how fast tasks in queue are processed when
     * many tasks are in the queue and the rate is high. This field
     * allows the queue to have a high rate so processing starts shortly
     * after a task is enqueued, but still limits resource usage when
     * many tasks are enqueued in a short period of time.
     *
     * The [token bucket](https://wikipedia.org/wiki/Token_Bucket)
     * algorithm is used to control the rate of task dispatches. Each
     * queue has a token bucket that holds tokens, up to the maximum
     * specified by `max_burst_size`. Each time a task is dispatched, a
     * token is removed from the bucket. Tasks will be dispatched until
     * the queue's bucket runs out of tokens. The bucket will be
     * continuously refilled with new tokens based on
     * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     *
     * The default value of `max_burst_size` is picked by Cloud Tasks
     * based on the value of
     * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     *
     * The maximum value of `max_burst_size` is 500.
     *
     * For App Engine queues that were created or updated using
     * `queue.yaml/xml`, `max_burst_size` is equal to
     * [bucket_size](https://cloud.google.com/appengine/docs/standard/python/config/queueref#bucket_size).
     * If
     * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is called
     * on a queue without explicitly setting a value for `max_burst_size`,
     * `max_burst_size` value will get updated if
     * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is
     * updating [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     * </pre>
     *
     * <code>int32 max_burst_size = 2;</code>
     *
     * @return The maxBurstSize.
     */
    @java.lang.Override
    public int getMaxBurstSize() {
      return maxBurstSize_;
    }
    /**
     *
     *
     * <pre>
     * The max burst size.
     *
     * Max burst size limits how fast tasks in queue are processed when
     * many tasks are in the queue and the rate is high. This field
     * allows the queue to have a high rate so processing starts shortly
     * after a task is enqueued, but still limits resource usage when
     * many tasks are enqueued in a short period of time.
     *
     * The [token bucket](https://wikipedia.org/wiki/Token_Bucket)
     * algorithm is used to control the rate of task dispatches. Each
     * queue has a token bucket that holds tokens, up to the maximum
     * specified by `max_burst_size`. Each time a task is dispatched, a
     * token is removed from the bucket. Tasks will be dispatched until
     * the queue's bucket runs out of tokens. The bucket will be
     * continuously refilled with new tokens based on
     * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     *
     * The default value of `max_burst_size` is picked by Cloud Tasks
     * based on the value of
     * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     *
     * The maximum value of `max_burst_size` is 500.
     *
     * For App Engine queues that were created or updated using
     * `queue.yaml/xml`, `max_burst_size` is equal to
     * [bucket_size](https://cloud.google.com/appengine/docs/standard/python/config/queueref#bucket_size).
     * If
     * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is called
     * on a queue without explicitly setting a value for `max_burst_size`,
     * `max_burst_size` value will get updated if
     * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is
     * updating [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     * </pre>
     *
     * <code>int32 max_burst_size = 2;</code>
     *
     * @param value The maxBurstSize to set.
     * @return This builder for chaining.
     */
    public Builder setMaxBurstSize(int value) {
      maxBurstSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The max burst size.
     *
     * Max burst size limits how fast tasks in queue are processed when
     * many tasks are in the queue and the rate is high. This field
     * allows the queue to have a high rate so processing starts shortly
     * after a task is enqueued, but still limits resource usage when
     * many tasks are enqueued in a short period of time.
     *
     * The [token bucket](https://wikipedia.org/wiki/Token_Bucket)
     * algorithm is used to control the rate of task dispatches. Each
     * queue has a token bucket that holds tokens, up to the maximum
     * specified by `max_burst_size`. Each time a task is dispatched, a
     * token is removed from the bucket. Tasks will be dispatched until
     * the queue's bucket runs out of tokens. The bucket will be
     * continuously refilled with new tokens based on
     * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     *
     * The default value of `max_burst_size` is picked by Cloud Tasks
     * based on the value of
     * [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     *
     * The maximum value of `max_burst_size` is 500.
     *
     * For App Engine queues that were created or updated using
     * `queue.yaml/xml`, `max_burst_size` is equal to
     * [bucket_size](https://cloud.google.com/appengine/docs/standard/python/config/queueref#bucket_size).
     * If
     * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is called
     * on a queue without explicitly setting a value for `max_burst_size`,
     * `max_burst_size` value will get updated if
     * [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] is
     * updating [max_dispatches_per_second][RateLimits.max_dispatches_per_second].
     * </pre>
     *
     * <code>int32 max_burst_size = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMaxBurstSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      maxBurstSize_ = 0;
      onChanged();
      return this;
    }
    private int maxConcurrentTasks_;
    /**
     *
     *
     * <pre>
     * The maximum number of concurrent tasks that Cloud Tasks allows
     * to be dispatched for this queue. After this threshold has been
     * reached, Cloud Tasks stops dispatching tasks until the number of
     * concurrent requests decreases.
     *
     * If unspecified when the queue is created, Cloud Tasks will pick the
     * default.
     *
     *
     * The maximum allowed value is 5,000.
     *
     * This field is output only for
     * [pull queues][google.cloud.tasks.v2beta2.PullTarget] and always -1, which
     * indicates no limit. No other queue types can have `max_concurrent_tasks`
     * set to -1.
     *
     *
     * This field has the same meaning as
     * [max_concurrent_requests in
     * queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#max_concurrent_requests).
     * </pre>
     *
     * <code>int32 max_concurrent_tasks = 3;</code>
     *
     * @return The maxConcurrentTasks.
     */
    @java.lang.Override
    public int getMaxConcurrentTasks() {
      return maxConcurrentTasks_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of concurrent tasks that Cloud Tasks allows
     * to be dispatched for this queue. After this threshold has been
     * reached, Cloud Tasks stops dispatching tasks until the number of
     * concurrent requests decreases.
     *
     * If unspecified when the queue is created, Cloud Tasks will pick the
     * default.
     *
     *
     * The maximum allowed value is 5,000.
     *
     * This field is output only for
     * [pull queues][google.cloud.tasks.v2beta2.PullTarget] and always -1, which
     * indicates no limit. No other queue types can have `max_concurrent_tasks`
     * set to -1.
     *
     *
     * This field has the same meaning as
     * [max_concurrent_requests in
     * queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#max_concurrent_requests).
     * </pre>
     *
     * <code>int32 max_concurrent_tasks = 3;</code>
     *
     * @param value The maxConcurrentTasks to set.
     * @return This builder for chaining.
     */
    public Builder setMaxConcurrentTasks(int value) {
      maxConcurrentTasks_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of concurrent tasks that Cloud Tasks allows
     * to be dispatched for this queue. After this threshold has been
     * reached, Cloud Tasks stops dispatching tasks until the number of
     * concurrent requests decreases.
     *
     * If unspecified when the queue is created, Cloud Tasks will pick the
     * default.
     *
     *
     * The maximum allowed value is 5,000.
     *
     * This field is output only for
     * [pull queues][google.cloud.tasks.v2beta2.PullTarget] and always -1, which
     * indicates no limit. No other queue types can have `max_concurrent_tasks`
     * set to -1.
     *
     *
     * This field has the same meaning as
     * [max_concurrent_requests in
     * queue.yaml/xml](https://cloud.google.com/appengine/docs/standard/python/config/queueref#max_concurrent_requests).
     * </pre>
     *
     * <code>int32 max_concurrent_tasks = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMaxConcurrentTasks() {
      bitField0_ = (bitField0_ & ~0x00000004);
      maxConcurrentTasks_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.tasks.v2beta2.RateLimits)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RateLimits)
  // Shared immutable default instance (all fields at proto3 defaults).
  private static final com.google.cloud.tasks.v2beta2.RateLimits DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.tasks.v2beta2.RateLimits();
  }
  public static com.google.cloud.tasks.v2beta2.RateLimits getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; parsing is delegated to the builder's
  // mergeFrom, and partial results are attached to any thrown exception.
  private static final com.google.protobuf.Parser<RateLimits> PARSER =
      new com.google.protobuf.AbstractParser<RateLimits>() {
        @java.lang.Override
        public RateLimits parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<RateLimits> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<RateLimits> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.tasks.v2beta2.RateLimits getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 34,984 | java-grafeas/src/main/java/io/grafeas/v1/GrafeasOuterClass.java | /*
* Copyright 2025 The Grafeas Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: grafeas/v1/grafeas.proto
// Protobuf Java Version: 3.25.8
package io.grafeas.v1;
public final class GrafeasOuterClass {
private GrafeasOuterClass() {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_Occurrence_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_Occurrence_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_Note_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_Note_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_GetOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_GetOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_ListOccurrencesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_ListOccurrencesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_ListOccurrencesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_ListOccurrencesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_DeleteOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_DeleteOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_CreateOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_CreateOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_UpdateOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_UpdateOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1_GetNoteRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1_GetNoteRequest_fieldAccessorTable;
  // One (message descriptor, field accessor table) pair per message declared in
  // grafeas/v1/grafeas.proto. All of these are assigned exactly once by the
  // static initializer below, after the file descriptor has been built.
  // Generated protobuf code — do not edit by hand.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_GetOccurrenceNoteRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_GetOccurrenceNoteRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_ListNotesRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_ListNotesRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_ListNotesResponse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_ListNotesResponse_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_DeleteNoteRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_DeleteNoteRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_CreateNoteRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_CreateNoteRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_UpdateNoteRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_UpdateNoteRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_ListNoteOccurrencesRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_ListNoteOccurrencesRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_ListNoteOccurrencesResponse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_ListNoteOccurrencesResponse_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_BatchCreateNotesRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_BatchCreateNotesRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_BatchCreateNotesRequest_NotesEntry_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_BatchCreateNotesRequest_NotesEntry_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_BatchCreateNotesResponse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_BatchCreateNotesResponse_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_BatchCreateOccurrencesRequest_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_BatchCreateOccurrencesRequest_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_grafeas_v1_BatchCreateOccurrencesResponse_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_grafeas_v1_BatchCreateOccurrencesResponse_fieldAccessorTable;
  /**
   * Returns the {@code FileDescriptor} for {@code grafeas/v1/grafeas.proto}.
   * The descriptor is built once by the static initializer below.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
    return descriptor;
  }
  // Backing file descriptor; assigned exactly once by the static initializer.
  private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
  static {
    // Serialized FileDescriptorProto of grafeas/v1/grafeas.proto, emitted by the
    // protobuf code generator as concatenated escaped-string chunks.
    // Generated data — never edit these strings by hand.
    java.lang.String[] descriptorData = {
      "\n"
          + "\030grafeas/v1/grafeas.proto\022\n"
          + "grafeas.v1\032\034google/api/annotations.proto\032\027google/api"
          + "/client.proto\032\037google/api/field_behavior"
          + ".proto\032\031google/api/resource.proto\032\033google/protobuf/empty.proto\032"
          + " google/protobuf/field_mask.proto\032\037google/protobuf/timest"
          + "amp.proto\032\034grafeas/v1/attestation.proto\032"
          + "\026grafeas/v1/build.proto\032\027grafeas/v1/comm"
          + "on.proto\032\033grafeas/v1/compliance.proto\032\033g"
          + "rafeas/v1/deployment.proto\032\032grafeas/v1/d"
          + "iscovery.proto\032!grafeas/v1/dsse_attestat"
          + "ion.proto\032\026grafeas/v1/image.proto\032\030grafe"
          + "as/v1/package.proto\032\025grafeas/v1/sbom.pro"
          + "to\032\027grafeas/v1/secret.proto\032\030grafeas/v1/"
          + "upgrade.proto\032\024grafeas/v1/vex.proto\032\036grafeas/v1/vulnerability.proto\"\352\007\n\n"
          + "Occurrence\022\014\n"
          + "\004name\030\001 \001(\t\022\024\n"
          + "\014resource_uri\030\002 \001(\t\022\021\n"
          + "\tnote_name\030\003 \001(\t\022\"\n"
          + "\004kind\030\004 \001(\0162\024.grafeas.v1.NoteKind\022\023\n"
          + "\013remediation\030\005 \001(\t\022/\n"
          + "\013create_time\030\006 \001(\0132\032.google.protobuf.Timestamp\022/\n"
          + "\013update_time\030\007 \001(\0132\032.google.protobuf.Timestamp\022<\n\r"
          + "vulnerability\030\010 \001(\0132#.grafeas.v1.VulnerabilityOccurrenceH\000\022,\n"
          + "\005build\030\t \001(\0132\033.grafeas.v1.BuildOccurrenceH\000\022,\n"
          + "\005image\030\n"
          + " \001(\0132\033.grafeas.v1.ImageOccurrenceH\000\0220\n"
          + "\007package\030\013 \001(\0132\035.grafeas.v1.PackageOccurrenceH\000\0226\n\n"
          + "deployment\030\014 \001(\0132 .grafeas.v1.DeploymentOccurrenceH\000\0224\n"
          + "\tdiscovery\030\r"
          + " \001(\0132\037.grafeas.v1.DiscoveryOccurrenceH\000\0228\n"
          + "\013attestation\030\016 \001(\0132!.grafeas.v1.AttestationOccurrenceH\000\0220\n"
          + "\007upgrade\030\017 \001(\0132\035.grafeas.v1.UpgradeOccurrenceH\000\0226\n\n"
          + "compliance\030\020 \001(\0132 .grafeas.v1.ComplianceOccurrenceH\000\022A\n"
          + "\020dsse_attestation\030\021"
          + " \001(\0132%.grafeas.v1.DSSEAttestationOccurrenceH\000\022=\n"
          + "\016sbom_reference\030\023 \001(\0132#.grafeas.v1.SBOMReferenceOccurrenceH\000\022.\n"
          + "\006secret\030\024 \001(\0132\034.grafeas.v1.SecretOccurrenceH\000\022&\n"
          + "\010envelope\030\022 \001(\0132\024.grafeas.v1.Envelope:G\352AD\n"
          + "\025grafeas.io/Occurrence\022+projects/{project}/occurrences/{occurrence}B\t\n"
          + "\007details\"\241\010\n"
          + "\004Note\022\014\n"
          + "\004name\030\001 \001(\t\022\031\n"
          + "\021short_description\030\002 \001(\t\022\030\n"
          + "\020long_description\030\003 \001(\t\022\"\n"
          + "\004kind\030\004 \001(\0162\024.grafeas.v1.NoteKind\022+\n"
          + "\013related_url\030\005 \003(\0132\026.grafeas.v1.RelatedUrl\0223\n"
          + "\017expiration_time\030\006 \001(\0132\032.google.protobuf.Timestamp\022/\n"
          + "\013create_time\030\007 \001(\0132\032.google.protobuf.Timestamp\022/\n"
          + "\013update_time\030\010 \001(\0132\032.google.protobuf.Timestamp\022\032\n"
          + "\022related_note_names\030\t \003(\t\0226\n\r"
          + "vulnerability\030\n"
          + " \001(\0132\035.grafeas.v1.VulnerabilityNoteH\000\022&\n"
          + "\005build\030\013 \001(\0132\025.grafeas.v1.BuildNoteH\000\022&\n"
          + "\005image\030\014 \001(\0132\025.grafeas.v1.ImageNoteH\000\022*\n"
          + "\007package\030\r"
          + " \001(\0132\027.grafeas.v1.PackageNoteH\000\0220\n\n"
          + "deployment\030\016 \001(\0132\032.grafeas.v1.DeploymentNoteH\000\022.\n"
          + "\tdiscovery\030\017 \001(\0132\031.grafeas.v1.DiscoveryNoteH\000\0222\n"
          + "\013attestation\030\020 \001(\0132\033.grafeas.v1.AttestationNoteH\000\022*\n"
          + "\007upgrade\030\021 \001(\0132\027.grafeas.v1.UpgradeNoteH\000\0220\n\n"
          + "compliance\030\022 \001(\0132\032.grafeas.v1.ComplianceNoteH\000\022;\n"
          + "\020dsse_attestation\030\023 \001(\0132\037.grafeas.v1.DSSEAttestationNoteH\000\022K\n"
          + "\030vulnerability_assessment\030\024"
          + " \001(\0132\'.grafeas.v1.VulnerabilityAssessmentNoteH\000\0227\n"
          + "\016sbom_reference\030\025 \001(\0132\035.grafeas.v1.SBOMReferenceNoteH\000\022(\n"
          + "\006secret\030\026 \001(\0132\026.grafeas.v1.SecretNoteH\000:5\352A2\n"
          + "\017grafeas.io/Note\022\037projects/{project}/notes/{note}B\006\n"
          + "\004type\"C\n"
          + "\024GetOccurrenceRequest\022+\n"
          + "\004name\030\001 \001(\tB\035\340A\002\372A\027\n"
          + "\025grafeas.io/Occurrence\"{\n"
          + "\026ListOccurrencesRequest\022*\n"
          + "\006parent\030\001 \001(\tB\032\340A\002\372A\024\n"
          + "\022grafeas.io/Project\022\016\n"
          + "\006filter\030\002 \001(\t\022\021\n"
          + "\tpage_size\030\003 \001(\005\022\022\n\n"
          + "page_token\030\004 \001(\t\"_\n"
          + "\027ListOccurrencesResponse\022+\n"
          + "\013occurrences\030\001 \003(\0132\026.grafeas.v1.Occurrence\022\027\n"
          + "\017next_page_token\030\002 \001(\t\"F\n"
          + "\027DeleteOccurrenceRequest\022+\n"
          + "\004name\030\001 \001(\tB\035\340A\002\372A\027\n"
          + "\025grafeas.io/Occurrence\"v\n"
          + "\027CreateOccurrenceRequest\022*\n"
          + "\006parent\030\001 \001(\tB\032\340A\002\372A\024\n"
          + "\022grafeas.io/Project\022/\n\n"
          + "occurrence\030\002 \001(\0132\026.grafeas.v1.OccurrenceB\003\340A\002\"\250\001\n"
          + "\027UpdateOccurrenceRequest\022+\n"
          + "\004name\030\001 \001(\tB\035\340A\002\372A\027\n"
          + "\025grafeas.io/Occurrence\022/\n\n"
          + "occurrence\030\002 \001(\0132\026.grafeas.v1.OccurrenceB\003\340A\002\022/\n"
          + "\013update_mask\030\003 \001(\0132\032.google.protobuf.FieldMask\"7\n"
          + "\016GetNoteRequest\022%\n"
          + "\004name\030\001 \001(\tB\027\340A\002\372A\021\n"
          + "\017grafeas.io/Note\"G\n"
          + "\030GetOccurrenceNoteRequest\022+\n"
          + "\004name\030\001 \001(\tB\035\340A\002\372A\027\n"
          + "\025grafeas.io/Occurrence\"u\n"
          + "\020ListNotesRequest\022*\n"
          + "\006parent\030\001 \001(\tB\032\340A\002\372A\024\n"
          + "\022grafeas.io/Project\022\016\n"
          + "\006filter\030\002 \001(\t\022\021\n"
          + "\tpage_size\030\003 \001(\005\022\022\n\n"
          + "page_token\030\004 \001(\t\"M\n"
          + "\021ListNotesResponse\022\037\n"
          + "\005notes\030\001 \003(\0132\020.grafeas.v1.Note\022\027\n"
          + "\017next_page_token\030\002 \001(\t\":\n"
          + "\021DeleteNoteRequest\022%\n"
          + "\004name\030\001 \001(\tB\027\340A\002\372A\021\n"
          + "\017grafeas.io/Note\"z\n"
          + "\021CreateNoteRequest\022*\n"
          + "\006parent\030\001 \001(\tB\032\340A\002\372A\024\n"
          + "\022grafeas.io/Project\022\024\n"
          + "\007note_id\030\002 \001(\tB\003\340A\002\022#\n"
          + "\004note\030\003 \001(\0132\020.grafeas.v1.NoteB\003\340A\002\"\220\001\n"
          + "\021UpdateNoteRequest\022%\n"
          + "\004name\030\001 \001(\tB\027\340A\002\372A\021\n"
          + "\017grafeas.io/Note\022#\n"
          + "\004note\030\002 \001(\0132\020.grafeas.v1.NoteB\003\340A\002\022/\n"
          + "\013update_mask\030\003 \001(\0132\032.google.protobuf.FieldMask\"z\n"
          + "\032ListNoteOccurrencesRequest\022%\n"
          + "\004name\030\001 \001(\tB\027\340A\002\372A\021\n"
          + "\017grafeas.io/Note\022\016\n"
          + "\006filter\030\002 \001(\t\022\021\n"
          + "\tpage_size\030\003 \001(\005\022\022\n\n"
          + "page_token\030\004 \001(\t\"c\n"
          + "\033ListNoteOccurrencesResponse\022+\n"
          + "\013occurrences\030\001 \003(\0132\026.grafeas.v1.Occurrence\022\027\n"
          + "\017next_page_token\030\002 \001(\t\"\311\001\n"
          + "\027BatchCreateNotesRequest\022*\n"
          + "\006parent\030\001 \001(\tB\032\340A\002\372A\024\n"
          + "\022grafeas.io/Project\022B\n"
          + "\005notes\030\002 \003(\0132.."
          + "grafeas.v1.BatchCreateNotesRequest.NotesEntryB\003\340A\002\032>\n\n"
          + "NotesEntry\022\013\n"
          + "\003key\030\001 \001(\t\022\037\n"
          + "\005value\030\002 \001(\0132\020.grafeas.v1.Note:\0028\001\";\n"
          + "\030BatchCreateNotesResponse\022\037\n"
          + "\005notes\030\001 \003(\0132\020.grafeas.v1.Note\"}\n"
          + "\035BatchCreateOccurrencesRequest\022*\n"
          + "\006parent\030\001 \001(\tB\032\340A\002\372A\024\n"
          + "\022grafeas.io/Project\0220\n"
          + "\013occurrences\030\002 \003(\0132\026.grafeas.v1.OccurrenceB\003\340A\002\"M\n"
          + "\036BatchCreateOccurrencesResponse\022+\n"
          + "\013occurrences\030\001 \003(\0132\026.grafeas.v1.Occurrence2\215\026\n"
          + "\007Grafeas\022\260\001\n\r"
          + "GetOccurrence\022 .grafeas.v1.GetOccurrenceRe"
          + "quest\032\026.grafeas.v1.Occurrence\"e\332A\004name\202\323"
          + "\344\223\002X\022#/v1/{name=projects/*/occurrences/*"
          + "}Z1\022//v1/{name=projects/*/locations/*/occurrences/*}\022\312\001\n"
          + "\017ListOccurrences\022\".grafe"
          + "as.v1.ListOccurrencesRequest\032#.grafeas.v1.ListOccurrencesResponse\"n\332A\r"
          + "parent,filter\202\323\344\223\002X\022#/v1/{parent=projects/*}/occur"
          + "rencesZ1\022//v1/{parent=projects/*/locations/*}/occurrences\022\266\001\n"
          + "\020DeleteOccurrence\022#.grafeas.v1.DeleteOccurrenceRequest\032\026.go"
          + "ogle.protobuf.Empty\"e\332A\004name\202\323\344\223\002X*#/v1/"
          + "{name=projects/*/occurrences/*}Z1*//v1/{"
          + "name=projects/*/locations/*/occurrences/*}\022\334\001\n"
          + "\020CreateOccurrence\022#.grafeas.v1.CreateOccurrenceRequest\032\026.grafeas.v1.Occurr"
          + "ence\"\212\001\332A\021parent,occurrence\202\323\344\223\002p\"#/v1/{parent=projects/*}/occurrences:\n"
          + "occurrenceZ=\"//v1/{parent=projects/*/locations/*}/occurrences:\n"
          + "occurrence\022\203\002\n"
          + "\026BatchCreateOccurrences\022).grafeas.v1.BatchCreateOcc"
          + "urrencesRequest\032*.grafeas.v1.BatchCreate"
          + "OccurrencesResponse\"\221\001\332A\022parent,occurren"
          + "ces\202\323\344\223\002v\"//v1/{parent=projects/*}/occur"
          + "rences:batchCreate:\001*Z@\";/v1/{parent=pro"
          + "jects/*/locations/*}/occurrences:batchCreate:\001*\022\346\001\n"
          + "\020UpdateOccurrence\022#.grafeas.v1.UpdateOccurrenceRequest\032\026.grafeas.v1.O"
          + "ccurrence\"\224\001\332A\033name,occurrence,update_ma"
          + "sk\202\323\344\223\002p2#/v1/{name=projects/*/occurrences/*}:\n"
          + "occurrenceZ=2//v1/{name=projects/*/locations/*/occurrences/*}:\n"
          + "occurrence\022\276\001\n"
          + "\021GetOccurrenceNote\022$.grafeas.v1.GetO"
          + "ccurrenceNoteRequest\032\020.grafeas.v1.Note\"q"
          + "\332A\004name\202\323\344\223\002d\022)/v1/{name=projects/*/occu"
          + "rrences/*}/notesZ7\0225/v1/{name=projects/*/locations/*/occurrences/*}/notes\022\222\001\n"
          + "\007GetNote\022\032.grafeas.v1.GetNoteRequest\032\020.graf"
          + "eas.v1.Note\"Y\332A\004name\202\323\344\223\002L\022\035/v1/{name=pr"
          + "ojects/*/notes/*}Z+\022)/v1/{name=projects/*/locations/*/notes/*}\022\254\001\n"
          + "\tListNotes\022\034.g"
          + "rafeas.v1.ListNotesRequest\032\035.grafeas.v1.ListNotesResponse\"b\332A\r"
          + "parent,filter\202\323\344\223\002L\022\035/v1/{parent=projects/*}/notesZ+\022)/v1/"
          + "{parent=projects/*/locations/*}/notes\022\236\001\n\n"
          + "DeleteNote\022\035.grafeas.v1.DeleteNoteRequ"
          + "est\032\026.google.protobuf.Empty\"Y\332A\004name\202\323\344\223"
          + "\002L*\035/v1/{name=projects/*/notes/*}Z+*)/v1"
          + "/{name=projects/*/locations/*/notes/*}\022\263\001\n\n"
          + "CreateNote\022\035.grafeas.v1.CreateNoteReq"
          + "uest\032\020.grafeas.v1.Note\"t\332A\023parent,note_i"
          + "d,note\202\323\344\223\002X\"\035/v1/{parent=projects/*}/no"
          + "tes:\004noteZ1\")/v1/{parent=projects/*/locations/*}/notes:\004note\022\336\001\n"
          + "\020BatchCreateNotes\022#.grafeas.v1.BatchCreateNotesRequest\032$"
          + ".grafeas.v1.BatchCreateNotesResponse\"\177\332A"
          + "\014parent,notes\202\323\344\223\002j\")/v1/{parent=project"
          + "s/*}/notes:batchCreate:\001*Z:\"5/v1/{parent"
          + "=projects/*/locations/*}/notes:batchCreate:\001*\022\265\001\n\n"
          + "UpdateNote\022\035.grafeas.v1.Update"
          + "NoteRequest\032\020.grafeas.v1.Note\"v\332A\025name,n"
          + "ote,update_mask\202\323\344\223\002X2\035/v1/{name=project"
          + "s/*/notes/*}:\004noteZ12)/v1/{name=projects/*/locations/*/notes/*}:\004note\022\340\001\n"
          + "\023ListNoteOccurrences\022&.grafeas.v1.ListNoteOccur"
          + "rencesRequest\032\'.grafeas.v1.ListNoteOccur"
          + "rencesResponse\"x\332A\013name,filter\202\323\344\223\002d\022)/v"
          + "1/{name=projects/*/notes/*}/occurrencesZ"
          + "7\0225/v1/{name=projects/*/locations/*/notes/*}/occurrences\032#\312A"
          + " containeranalysis.googleapis.comB|\n\r"
          + "io.grafeas.v1P\001Z8google"
          + ".golang.org/genproto/googleapis/grafeas/v1;grafeas\242\002\003GRA\352A(\n"
          + "\022grafeas.io/Project\022\022projects/{project}b\006proto3"
    };
    // Build the file descriptor, resolving the imports declared in the .proto file
    // against the already-built descriptors of the dependency files.
    descriptor =
        com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
            descriptorData,
            new com.google.protobuf.Descriptors.FileDescriptor[] {
              com.google.api.AnnotationsProto.getDescriptor(),
              com.google.api.ClientProto.getDescriptor(),
              com.google.api.FieldBehaviorProto.getDescriptor(),
              com.google.api.ResourceProto.getDescriptor(),
              com.google.protobuf.EmptyProto.getDescriptor(),
              com.google.protobuf.FieldMaskProto.getDescriptor(),
              com.google.protobuf.TimestampProto.getDescriptor(),
              io.grafeas.v1.Attestation.getDescriptor(),
              io.grafeas.v1.Build.getDescriptor(),
              io.grafeas.v1.Common.getDescriptor(),
              io.grafeas.v1.Compliance.getDescriptor(),
              io.grafeas.v1.Deployment.getDescriptor(),
              io.grafeas.v1.Discovery.getDescriptor(),
              io.grafeas.v1.DsseAttestation.getDescriptor(),
              io.grafeas.v1.Image.getDescriptor(),
              io.grafeas.v1.Package.getDescriptor(),
              io.grafeas.v1.Sbom.getDescriptor(),
              io.grafeas.v1.Secret.getDescriptor(),
              io.grafeas.v1.Upgrade.getDescriptor(),
              io.grafeas.v1.Vex.getDescriptor(),
              io.grafeas.v1.Vulnerability.getDescriptor(),
            });
    // Wire up one (descriptor, field accessor table) pair per message type, indexed
    // in .proto declaration order; the String[] lists the camel-cased field names.
    internal_static_grafeas_v1_Occurrence_descriptor = getDescriptor().getMessageTypes().get(0);
    internal_static_grafeas_v1_Occurrence_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_Occurrence_descriptor,
            new java.lang.String[] {
              "Name",
              "ResourceUri",
              "NoteName",
              "Kind",
              "Remediation",
              "CreateTime",
              "UpdateTime",
              "Vulnerability",
              "Build",
              "Image",
              "Package",
              "Deployment",
              "Discovery",
              "Attestation",
              "Upgrade",
              "Compliance",
              "DsseAttestation",
              "SbomReference",
              "Secret",
              "Envelope",
              "Details",
            });
    internal_static_grafeas_v1_Note_descriptor = getDescriptor().getMessageTypes().get(1);
    internal_static_grafeas_v1_Note_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_Note_descriptor,
            new java.lang.String[] {
              "Name",
              "ShortDescription",
              "LongDescription",
              "Kind",
              "RelatedUrl",
              "ExpirationTime",
              "CreateTime",
              "UpdateTime",
              "RelatedNoteNames",
              "Vulnerability",
              "Build",
              "Image",
              "Package",
              "Deployment",
              "Discovery",
              "Attestation",
              "Upgrade",
              "Compliance",
              "DsseAttestation",
              "VulnerabilityAssessment",
              "SbomReference",
              "Secret",
              "Type",
            });
    internal_static_grafeas_v1_GetOccurrenceRequest_descriptor =
        getDescriptor().getMessageTypes().get(2);
    internal_static_grafeas_v1_GetOccurrenceRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_GetOccurrenceRequest_descriptor,
            new java.lang.String[] {
              "Name",
            });
    internal_static_grafeas_v1_ListOccurrencesRequest_descriptor =
        getDescriptor().getMessageTypes().get(3);
    internal_static_grafeas_v1_ListOccurrencesRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_ListOccurrencesRequest_descriptor,
            new java.lang.String[] {
              "Parent", "Filter", "PageSize", "PageToken",
            });
    internal_static_grafeas_v1_ListOccurrencesResponse_descriptor =
        getDescriptor().getMessageTypes().get(4);
    internal_static_grafeas_v1_ListOccurrencesResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_ListOccurrencesResponse_descriptor,
            new java.lang.String[] {
              "Occurrences", "NextPageToken",
            });
    internal_static_grafeas_v1_DeleteOccurrenceRequest_descriptor =
        getDescriptor().getMessageTypes().get(5);
    internal_static_grafeas_v1_DeleteOccurrenceRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_DeleteOccurrenceRequest_descriptor,
            new java.lang.String[] {
              "Name",
            });
    internal_static_grafeas_v1_CreateOccurrenceRequest_descriptor =
        getDescriptor().getMessageTypes().get(6);
    internal_static_grafeas_v1_CreateOccurrenceRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_CreateOccurrenceRequest_descriptor,
            new java.lang.String[] {
              "Parent", "Occurrence",
            });
    internal_static_grafeas_v1_UpdateOccurrenceRequest_descriptor =
        getDescriptor().getMessageTypes().get(7);
    internal_static_grafeas_v1_UpdateOccurrenceRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_UpdateOccurrenceRequest_descriptor,
            new java.lang.String[] {
              "Name", "Occurrence", "UpdateMask",
            });
    internal_static_grafeas_v1_GetNoteRequest_descriptor = getDescriptor().getMessageTypes().get(8);
    internal_static_grafeas_v1_GetNoteRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_GetNoteRequest_descriptor,
            new java.lang.String[] {
              "Name",
            });
    internal_static_grafeas_v1_GetOccurrenceNoteRequest_descriptor =
        getDescriptor().getMessageTypes().get(9);
    internal_static_grafeas_v1_GetOccurrenceNoteRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_GetOccurrenceNoteRequest_descriptor,
            new java.lang.String[] {
              "Name",
            });
    internal_static_grafeas_v1_ListNotesRequest_descriptor =
        getDescriptor().getMessageTypes().get(10);
    internal_static_grafeas_v1_ListNotesRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_ListNotesRequest_descriptor,
            new java.lang.String[] {
              "Parent", "Filter", "PageSize", "PageToken",
            });
    internal_static_grafeas_v1_ListNotesResponse_descriptor =
        getDescriptor().getMessageTypes().get(11);
    internal_static_grafeas_v1_ListNotesResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_ListNotesResponse_descriptor,
            new java.lang.String[] {
              "Notes", "NextPageToken",
            });
    internal_static_grafeas_v1_DeleteNoteRequest_descriptor =
        getDescriptor().getMessageTypes().get(12);
    internal_static_grafeas_v1_DeleteNoteRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_DeleteNoteRequest_descriptor,
            new java.lang.String[] {
              "Name",
            });
    internal_static_grafeas_v1_CreateNoteRequest_descriptor =
        getDescriptor().getMessageTypes().get(13);
    internal_static_grafeas_v1_CreateNoteRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_CreateNoteRequest_descriptor,
            new java.lang.String[] {
              "Parent", "NoteId", "Note",
            });
    internal_static_grafeas_v1_UpdateNoteRequest_descriptor =
        getDescriptor().getMessageTypes().get(14);
    internal_static_grafeas_v1_UpdateNoteRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_UpdateNoteRequest_descriptor,
            new java.lang.String[] {
              "Name", "Note", "UpdateMask",
            });
    internal_static_grafeas_v1_ListNoteOccurrencesRequest_descriptor =
        getDescriptor().getMessageTypes().get(15);
    internal_static_grafeas_v1_ListNoteOccurrencesRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_ListNoteOccurrencesRequest_descriptor,
            new java.lang.String[] {
              "Name", "Filter", "PageSize", "PageToken",
            });
    internal_static_grafeas_v1_ListNoteOccurrencesResponse_descriptor =
        getDescriptor().getMessageTypes().get(16);
    internal_static_grafeas_v1_ListNoteOccurrencesResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_ListNoteOccurrencesResponse_descriptor,
            new java.lang.String[] {
              "Occurrences", "NextPageToken",
            });
    internal_static_grafeas_v1_BatchCreateNotesRequest_descriptor =
        getDescriptor().getMessageTypes().get(17);
    internal_static_grafeas_v1_BatchCreateNotesRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_BatchCreateNotesRequest_descriptor,
            new java.lang.String[] {
              "Parent", "Notes",
            });
    // NotesEntry is the synthetic map-entry message nested inside BatchCreateNotesRequest,
    // hence the lookup through getNestedTypes() rather than getMessageTypes().
    internal_static_grafeas_v1_BatchCreateNotesRequest_NotesEntry_descriptor =
        internal_static_grafeas_v1_BatchCreateNotesRequest_descriptor.getNestedTypes().get(0);
    internal_static_grafeas_v1_BatchCreateNotesRequest_NotesEntry_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_BatchCreateNotesRequest_NotesEntry_descriptor,
            new java.lang.String[] {
              "Key", "Value",
            });
    internal_static_grafeas_v1_BatchCreateNotesResponse_descriptor =
        getDescriptor().getMessageTypes().get(18);
    internal_static_grafeas_v1_BatchCreateNotesResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_BatchCreateNotesResponse_descriptor,
            new java.lang.String[] {
              "Notes",
            });
    internal_static_grafeas_v1_BatchCreateOccurrencesRequest_descriptor =
        getDescriptor().getMessageTypes().get(19);
    internal_static_grafeas_v1_BatchCreateOccurrencesRequest_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_BatchCreateOccurrencesRequest_descriptor,
            new java.lang.String[] {
              "Parent", "Occurrences",
            });
    internal_static_grafeas_v1_BatchCreateOccurrencesResponse_descriptor =
        getDescriptor().getMessageTypes().get(20);
    internal_static_grafeas_v1_BatchCreateOccurrencesResponse_fieldAccessorTable =
        new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
            internal_static_grafeas_v1_BatchCreateOccurrencesResponse_descriptor,
            new java.lang.String[] {
              "Occurrences",
            });
    // Register the google.api extension types used as custom options in this file,
    // then re-parse the descriptor so those options become accessible.
    com.google.protobuf.ExtensionRegistry registry =
        com.google.protobuf.ExtensionRegistry.newInstance();
    registry.add(com.google.api.ClientProto.defaultHost);
    registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
    registry.add(com.google.api.AnnotationsProto.http);
    registry.add(com.google.api.ClientProto.methodSignature);
    registry.add(com.google.api.ResourceProto.resource);
    registry.add(com.google.api.ResourceProto.resourceDefinition);
    registry.add(com.google.api.ResourceProto.resourceReference);
    com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
        descriptor, registry);
    // Trailing getDescriptor() calls on every dependency: standard generated-code tail,
    // presumably to force class initialization of the dependency files — see protoc output.
    com.google.api.AnnotationsProto.getDescriptor();
    com.google.api.ClientProto.getDescriptor();
    com.google.api.FieldBehaviorProto.getDescriptor();
    com.google.api.ResourceProto.getDescriptor();
    com.google.protobuf.EmptyProto.getDescriptor();
    com.google.protobuf.FieldMaskProto.getDescriptor();
    com.google.protobuf.TimestampProto.getDescriptor();
    io.grafeas.v1.Attestation.getDescriptor();
    io.grafeas.v1.Build.getDescriptor();
    io.grafeas.v1.Common.getDescriptor();
    io.grafeas.v1.Compliance.getDescriptor();
    io.grafeas.v1.Deployment.getDescriptor();
    io.grafeas.v1.Discovery.getDescriptor();
    io.grafeas.v1.DsseAttestation.getDescriptor();
    io.grafeas.v1.Image.getDescriptor();
    io.grafeas.v1.Package.getDescriptor();
    io.grafeas.v1.Sbom.getDescriptor();
    io.grafeas.v1.Secret.getDescriptor();
    io.grafeas.v1.Upgrade.getDescriptor();
    io.grafeas.v1.Vex.getDescriptor();
    io.grafeas.v1.Vulnerability.getDescriptor();
  }
// @@protoc_insertion_point(outer_class_scope)
}
|
apache/directory-studio | 36,407 | plugins/schemaeditor/src/main/java/org/apache/directory/studio/schemaeditor/model/difference/DifferenceEngine.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.studio.schemaeditor.model.difference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.ObjectClass;
import org.apache.directory.api.ldap.model.schema.ObjectClassTypeEnum;
import org.apache.directory.api.ldap.model.schema.SchemaObject;
import org.apache.directory.api.ldap.model.schema.UsageEnum;
import org.apache.directory.api.util.Strings;
import org.apache.directory.studio.schemaeditor.model.Schema;
/**
* This class represents the difference engine.
 * It is used to compute the differences between two sets of schema objects (schemas, attribute types and object classes).
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class DifferenceEngine
{
/**
* Gets the differences between two Lists of Schemas.
*
* @param l1
* the first list
* @param l2
* the second list
* @return
* the differences between the two schema Lists
*/
public static List<SchemaDifference> getDifferences( List<Schema> l1, List<Schema> l2 )
{
List<SchemaDifference> differences = new ArrayList<SchemaDifference>();
// Building Maps for schemas
Map<String, Schema> mapL1 = new HashMap<String, Schema>();
for ( Schema schema : l1 )
{
mapL1.put( Strings.toLowerCase( schema.getSchemaName() ), schema );
}
Map<String, Schema> mapL2 = new HashMap<String, Schema>();
for ( Schema schema : l2 )
{
mapL2.put( Strings.toLowerCase( schema.getSchemaName() ), schema );
}
// Looping on schemas from the first list
for ( Schema schemaFromL1 : l1 )
{
Schema schemaFromL2 = mapL2.get( Strings.toLowerCase( schemaFromL1.getSchemaName() ) );
if ( schemaFromL2 == null )
{
SchemaDifference schemaDifference = new SchemaDifference( schemaFromL1, null, DifferenceType.REMOVED );
differences.add( schemaDifference );
// Adding attribute types
for ( AttributeType at : schemaFromL1.getAttributeTypes() )
{
schemaDifference.addAttributeTypeDifference( new AttributeTypeDifference( null, at,
DifferenceType.REMOVED ) );
}
// Adding object classes
for ( ObjectClass oc : schemaFromL1.getObjectClasses() )
{
schemaDifference.addObjectClassDifference( new ObjectClassDifference( null, oc,
DifferenceType.REMOVED ) );
}
}
else
{
SchemaDifference schemaDifference = new SchemaDifference( schemaFromL1, schemaFromL2,
DifferenceType.IDENTICAL );
differences.add( schemaDifference );
// Building Maps for attribute types
Map<String, AttributeType> atMapL1 = new HashMap<String, AttributeType>();
for ( AttributeType at : schemaFromL1.getAttributeTypes() )
{
atMapL1.put( at.getOid(), at );
}
Map<String, AttributeType> atMapL2 = new HashMap<String, AttributeType>();
for ( AttributeType at : schemaFromL2.getAttributeTypes() )
{
atMapL2.put( at.getOid(), at );
}
// Looping on the attribute types from the Schema from the first list
for ( AttributeType atFromL1 : schemaFromL1.getAttributeTypes() )
{
AttributeType atFromL2 = atMapL2.get( atFromL1.getOid() );
if ( atFromL2 == null )
{
AttributeTypeDifference attributeTypeDifference = new AttributeTypeDifference( atFromL1, null,
DifferenceType.REMOVED );
schemaDifference.addAttributeTypeDifference( attributeTypeDifference );
schemaDifference.setType( DifferenceType.MODIFIED );
}
else
{
AttributeTypeDifference attributeTypeDifference = new AttributeTypeDifference( atFromL1,
atFromL2, DifferenceType.IDENTICAL );
schemaDifference.addAttributeTypeDifference( attributeTypeDifference );
List<PropertyDifference> atDifferences = getDifferences( atFromL1, atFromL2 );
if ( atDifferences.size() > 0 )
{
attributeTypeDifference.setType( DifferenceType.MODIFIED );
attributeTypeDifference.addDifferences( atDifferences );
schemaDifference.setType( DifferenceType.MODIFIED );
}
}
}
// Looping on the attribute types from the Schema from the second list
for ( AttributeType atFromL2 : schemaFromL2.getAttributeTypes() )
{
AttributeType atFromL1 = atMapL1.get( atFromL2.getOid() );
if ( atFromL1 == null )
{
AttributeTypeDifference attributeTypeDifference = new AttributeTypeDifference( null, atFromL2,
DifferenceType.ADDED );
schemaDifference.addAttributeTypeDifference( attributeTypeDifference );
schemaDifference.setType( DifferenceType.MODIFIED );
}
// If atFromL1 exists, then it has already been processed when looping on the first list.
}
// Building Maps for object classes
Map<String, ObjectClass> ocMapL1 = new HashMap<String, ObjectClass>();
for ( ObjectClass oc : schemaFromL1.getObjectClasses() )
{
ocMapL1.put( oc.getOid(), oc );
}
Map<String, ObjectClass> ocMapL2 = new HashMap<String, ObjectClass>();
for ( ObjectClass oc : schemaFromL2.getObjectClasses() )
{
ocMapL2.put( oc.getOid(), oc );
}
// Looping on the object classes from the Schema from the first list
for ( ObjectClass ocFromL1 : schemaFromL1.getObjectClasses() )
{
ObjectClass ocFromL2 = ocMapL2.get( ocFromL1.getOid() );
if ( ocFromL2 == null )
{
ObjectClassDifference objectClassDifference = new ObjectClassDifference( ocFromL1, null,
DifferenceType.REMOVED );
schemaDifference.addObjectClassDifference( objectClassDifference );
schemaDifference.setType( DifferenceType.MODIFIED );
}
else
{
ObjectClassDifference objectClassDifference = new ObjectClassDifference( ocFromL1, ocFromL2,
DifferenceType.IDENTICAL );
schemaDifference.addObjectClassDifference( objectClassDifference );
List<PropertyDifference> ocDifferences = getDifferences( ocFromL1, ocFromL2 );
if ( ocDifferences.size() > 0 )
{
objectClassDifference.setType( DifferenceType.MODIFIED );
objectClassDifference.addDifferences( ocDifferences );
schemaDifference.setType( DifferenceType.MODIFIED );
}
}
}
// Looping on the object classes from the Schema from the second list
for ( ObjectClass ocFromL2 : schemaFromL2.getObjectClasses() )
{
ObjectClass ocFromL1 = ocMapL1.get( ocFromL2.getOid() );
if ( ocFromL1 == null )
{
ObjectClassDifference objectClassDifference = new ObjectClassDifference( null, ocFromL2,
DifferenceType.ADDED );
schemaDifference.addObjectClassDifference( objectClassDifference );
schemaDifference.setType( DifferenceType.MODIFIED );
}
// If ocFromL1 exists, then it has already been processed when looping on the first list.
}
}
}
// Looping on schemas from the second list
for ( Schema schemaFromL2 : l2 )
{
Schema schemaFromL1 = mapL1.get( Strings.toLowerCase( schemaFromL2.getSchemaName() ) );
if ( schemaFromL1 == null )
{
SchemaDifference schemaDifference = new SchemaDifference( null, schemaFromL2, DifferenceType.ADDED );
differences.add( schemaDifference );
// Adding attribute types
for ( AttributeType at : schemaFromL2.getAttributeTypes() )
{
schemaDifference.addAttributeTypeDifference( new AttributeTypeDifference( null, at,
DifferenceType.ADDED ) );
}
// Adding object classes
for ( ObjectClass oc : schemaFromL2.getObjectClasses() )
{
schemaDifference.addObjectClassDifference( new ObjectClassDifference( null, oc,
DifferenceType.ADDED ) );
}
}
}
return differences;
}
/**
* Gets the differences between two ObjectClassImpl Objects.
*
* @param oc1
* the source ObjectClassImpl Object
* @param oc2
* the destination ObjectClassImpl Object
* @return
* the differences between two ObjectClassImpl Objects.
*/
public static List<PropertyDifference> getDifferences( ObjectClass oc1, ObjectClass oc2 )
{
List<PropertyDifference> differences = new ArrayList<PropertyDifference>();
// Aliases
differences.addAll( getAliasesDifferences( oc1, oc2 ) );
// Description
PropertyDifference descriptionDifference = getDescriptionDifference( oc1, oc2 );
if ( descriptionDifference != null )
{
differences.add( descriptionDifference );
}
// Obsolete
PropertyDifference obsoleteDifference = getObsoleteDifference( oc1, oc2 );
if ( obsoleteDifference != null )
{
differences.add( obsoleteDifference );
}
// Class type
PropertyDifference classTypeDifference = getClassTypeDifference( oc1, oc2 );
if ( classTypeDifference != null )
{
differences.add( classTypeDifference );
}
// Superior classes
differences.addAll( getSuperiorClassesDifferences( oc1, oc2 ) );
// Mandatory attribute types
differences.addAll( getMandatoryAttributeTypesDifferences( oc1, oc2 ) );
// Optional attribute types
differences.addAll( getOptionalAttributeTypesDifferences( oc1, oc2 ) );
return differences;
}
/**
* Gets the differences between two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the differences between two AttributeType Objects.
*/
public static List<PropertyDifference> getDifferences( AttributeType at1, AttributeType at2 )
{
List<PropertyDifference> differences = new ArrayList<PropertyDifference>();
// Aliases
differences.addAll( getAliasesDifferences( at1, at2 ) );
// Description
PropertyDifference descriptionDifference = getDescriptionDifference( at1, at2 );
if ( descriptionDifference != null )
{
differences.add( descriptionDifference );
}
// Obsolete
PropertyDifference obsoleteDifference = getObsoleteDifference( at1, at2 );
if ( obsoleteDifference != null )
{
differences.add( obsoleteDifference );
}
// Usage
PropertyDifference usageDifference = getUsageDifference( at1, at2 );
if ( usageDifference != null )
{
differences.add( usageDifference );
}
// Superior
PropertyDifference superiorDifference = getSuperiorDifference( at1, at2 );
if ( superiorDifference != null )
{
differences.add( superiorDifference );
}
// Syntax
PropertyDifference syntaxDifference = getSyntaxDifference( at1, at2 );
if ( syntaxDifference != null )
{
differences.add( syntaxDifference );
}
// Syntax length
PropertyDifference syntaxLengthDifference = getSyntaxLengthDifference( at1, at2 );
if ( syntaxLengthDifference != null )
{
differences.add( syntaxLengthDifference );
}
// Single value
PropertyDifference singleValueDifference = getSingleValueDifference( at1, at2 );
if ( singleValueDifference != null )
{
differences.add( singleValueDifference );
}
// Collective
PropertyDifference collectiveDifference = getCollectiveDifference( at1, at2 );
if ( collectiveDifference != null )
{
differences.add( collectiveDifference );
}
// No user modification
PropertyDifference noUserModificationDifference = getNoUserModificationDifference( at1, at2 );
if ( noUserModificationDifference != null )
{
differences.add( noUserModificationDifference );
}
// Equality
PropertyDifference equalityDifference = getEqualityDifference( at1, at2 );
if ( equalityDifference != null )
{
differences.add( equalityDifference );
}
// Ordering
PropertyDifference orderingDifference = getOrderingDifference( at1, at2 );
if ( orderingDifference != null )
{
differences.add( orderingDifference );
}
// Substring
PropertyDifference substringDifference = getSubstringDifference( at1, at2 );
if ( substringDifference != null )
{
differences.add( substringDifference );
}
return differences;
}
/**
* Gets the 'Aliases' differences between the two SchemaObject Objects.
*
* @param so1
* the source SchemaObject Object
* @param so2
* the destination SchemaObject Object
* @return
* the 'Aliases' differences between the two SchemaObject Objects
*/
private static List<PropertyDifference> getAliasesDifferences( SchemaObject so1, SchemaObject so2 )
{
List<PropertyDifference> differences = new ArrayList<PropertyDifference>();
List<String> so1Names = so1.getNames();
List<String> so2Names = so2.getNames();
for ( String name : so1Names )
{
if ( !so2Names.contains( name ) )
{
PropertyDifference diff = new AliasDifference( so1, so2, DifferenceType.REMOVED );
diff.setOldValue( name );
differences.add( diff );
}
}
for ( String name : so2Names )
{
if ( !so1Names.contains( name ) )
{
PropertyDifference diff = new AliasDifference( so1, so2, DifferenceType.ADDED );
diff.setNewValue( name );
differences.add( diff );
}
}
return differences;
}
/**
* Gets the 'Description' difference between the two SchemaObject Objects.
*
* @param so1
* the source SchemaObject Object
* @param so2
* the destination SchemaObject Object
* @return
* the 'Description' difference between the two SchemaObject Objects
*/
private static PropertyDifference getDescriptionDifference( SchemaObject so1, SchemaObject so2 )
{
String so1Description = so1.getDescription();
String so2Description = so2.getDescription();
if ( ( so1Description == null ) && ( so2Description != null ) )
{
PropertyDifference diff = new DescriptionDifference( so1, so2, DifferenceType.ADDED );
diff.setNewValue( so2Description );
return diff;
}
else if ( ( so1Description != null ) && ( so2Description == null ) )
{
PropertyDifference diff = new DescriptionDifference( so1, so2, DifferenceType.REMOVED );
diff.setOldValue( so1Description );
return diff;
}
else if ( ( so1Description != null ) && ( so2Description != null ) )
{
if ( !so1Description.equals( so2Description ) )
{
PropertyDifference diff = new DescriptionDifference( so1, so2, DifferenceType.MODIFIED );
diff.setOldValue( so1Description );
diff.setNewValue( so2Description );
return diff;
}
}
return null;
}
/**
* Gets the 'Obsolete' difference between the two SchemaObject Objects.
*
* @param so1
* the source SchemaObject Object
* @param so2
* the destination SchemaObject Object
* @return
* the 'Obsolete' difference between the two SchemaObject Objects
*/
private static PropertyDifference getObsoleteDifference( SchemaObject so1, SchemaObject so2 )
{
boolean so1Obsolete = so1.isObsolete();
boolean so2Obsolete = so2.isObsolete();
if ( so1Obsolete != so2Obsolete )
{
PropertyDifference diff = new ObsoleteDifference( so1, so2 );
diff.setOldValue( so1Obsolete );
diff.setNewValue( so2Obsolete );
return diff;
}
return null;
}
/**
* Gets the 'Class type' difference between the two ObjectClassImpl Objects.
*
* @param oc1
* the source ObjectClassImpl Object
* @param oc2
* the destination ObjectClassImpl Object
* @return
* the 'Class type' difference between the two ObjectClassImpl Objects
*/
private static PropertyDifference getClassTypeDifference( ObjectClass oc1, ObjectClass oc2 )
{
ObjectClassTypeEnum oc1ClassType = oc1.getType();
ObjectClassTypeEnum oc2ClassType = oc2.getType();
if ( oc1ClassType != oc2ClassType )
{
PropertyDifference diff = new ClassTypeDifference( oc1, oc2 );
diff.setOldValue( oc1ClassType );
diff.setNewValue( oc2ClassType );
return diff;
}
return null;
}
/**
* Gets the 'Superior Classes' differences between the two ObjectClassImpl Objects.
*
* @param oc1
* the source ObjectClassImpl Object
* @param oc2
* the destination ObjectClassImpl Object
* @return
* the 'Superior Classes' differences between the two ObjectClassImpl Objects
*/
private static List<PropertyDifference> getSuperiorClassesDifferences( ObjectClass oc1, ObjectClass oc2 )
{
List<PropertyDifference> differences = new ArrayList<PropertyDifference>();
List<String> oc1Sups = oc1.getSuperiorOids();
List<String> oc2Sups = oc2.getSuperiorOids();
for ( String name : oc1Sups )
{
if ( !oc2Sups.contains( name ) )
{
PropertyDifference diff = new SuperiorOCDifference( oc1, oc2, DifferenceType.REMOVED );
diff.setOldValue( name );
differences.add( diff );
}
}
for ( String name : oc2Sups )
{
if ( !oc1Sups.contains( name ) )
{
PropertyDifference diff = new SuperiorOCDifference( oc1, oc2, DifferenceType.ADDED );
diff.setNewValue( name );
differences.add( diff );
}
}
return differences;
}
/**
* Gets the 'Mandatory attribute types' differences between the two ObjectClassImpl Objects.
*
* @param oc1
* the source ObjectClassImpl Object
* @param oc2
* the destination ObjectClassImpl Object
* @return
* the 'Mandatory attribute types' differences between the two ObjectClassImpl Objects
*/
private static List<PropertyDifference> getMandatoryAttributeTypesDifferences( ObjectClass oc1,
ObjectClass oc2 )
{
List<PropertyDifference> differences = new ArrayList<PropertyDifference>();
List<String> oc1Musts = oc1.getMustAttributeTypeOids();
List<String> oc2Musts = oc2.getMustAttributeTypeOids();
for ( String name : oc1Musts )
{
if ( !oc2Musts.contains( name ) )
{
PropertyDifference diff = new MandatoryATDifference( oc1, oc2, DifferenceType.REMOVED );
diff.setOldValue( name );
differences.add( diff );
}
}
for ( String name : oc2Musts )
{
if ( !oc1Musts.contains( name ) )
{
PropertyDifference diff = new MandatoryATDifference( oc1, oc2, DifferenceType.ADDED );
diff.setNewValue( name );
differences.add( diff );
}
}
return differences;
}
/**
* Gets the 'Optional attribute types' differences between the two ObjectClassImpl Objects.
*
* @param oc1
* the source ObjectClassImpl Object
* @param oc2
* the destination ObjectClassImpl Object
* @return
* the 'Optional attribute types' differences between the two ObjectClassImpl Objects
*/
private static List<PropertyDifference> getOptionalAttributeTypesDifferences( ObjectClass oc1,
ObjectClass oc2 )
{
List<PropertyDifference> differences = new ArrayList<PropertyDifference>();
List<String> oc1Mays = oc1.getMayAttributeTypeOids();
List<String> oc2Mays = oc2.getMayAttributeTypeOids();
for ( String name : oc1Mays )
{
if ( !oc2Mays.contains( name ) )
{
PropertyDifference diff = new OptionalATDifference( oc1, oc2, DifferenceType.REMOVED );
diff.setOldValue( name );
differences.add( diff );
}
}
for ( String name : oc2Mays )
{
if ( !oc1Mays.contains( name ) )
{
PropertyDifference diff = new OptionalATDifference( oc1, oc2, DifferenceType.ADDED );
diff.setNewValue( name );
differences.add( diff );
}
}
return differences;
}
/**
* Gets the 'Usage' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Usage' difference between the two AttributeType Objects
*/
private static PropertyDifference getUsageDifference( AttributeType at1, AttributeType at2 )
{
UsageEnum at1Usage = at1.getUsage();
UsageEnum at2Usage = at2.getUsage();
if ( at1Usage != at2Usage )
{
PropertyDifference diff = new UsageDifference( at1, at2 );
diff.setOldValue( at1Usage );
diff.setNewValue( at2Usage );
return diff;
}
return null;
}
/**
* Gets the 'Superior' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Superior' difference between the two AttributeType Objects
*/
private static PropertyDifference getSuperiorDifference( AttributeType at1, AttributeType at2 )
{
String at1Superior = at1.getSuperiorOid();
String at2Superior = at2.getSuperiorOid();
if ( ( at1Superior == null ) && ( at2Superior != null ) )
{
PropertyDifference diff = new SuperiorATDifference( at1, at2, DifferenceType.ADDED );
diff.setNewValue( at2Superior );
return diff;
}
else if ( ( at1Superior != null ) && ( at2Superior == null ) )
{
PropertyDifference diff = new SuperiorATDifference( at1, at2, DifferenceType.REMOVED );
diff.setOldValue( at1Superior );
return diff;
}
else if ( ( at1Superior != null ) && ( at2Superior != null ) )
{
if ( !at1Superior.equals( at2Superior ) )
{
PropertyDifference diff = new SuperiorATDifference( at1, at2, DifferenceType.MODIFIED );
diff.setOldValue( at1Superior );
diff.setNewValue( at2Superior );
return diff;
}
}
return null;
}
/**
* Gets the 'Syntax' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Syntax' difference between the two AttributeType Objects
*/
private static PropertyDifference getSyntaxDifference( AttributeType at1, AttributeType at2 )
{
String at1Syntax = at1.getSyntaxOid();
String at2Syntax = at2.getSyntaxOid();
if ( ( at1Syntax == null ) && ( at2Syntax != null ) )
{
PropertyDifference diff = new SyntaxDifference( at1, at2, DifferenceType.ADDED );
diff.setNewValue( at2Syntax );
return diff;
}
else if ( ( at1Syntax != null ) && ( at2Syntax == null ) )
{
PropertyDifference diff = new SyntaxDifference( at1, at2, DifferenceType.REMOVED );
diff.setOldValue( at1Syntax );
return diff;
}
else if ( ( at1Syntax != null ) && ( at2Syntax != null ) )
{
if ( !at1Syntax.equals( at2Syntax ) )
{
PropertyDifference diff = new SyntaxDifference( at1, at2, DifferenceType.MODIFIED );
diff.setOldValue( at1Syntax );
diff.setNewValue( at2Syntax );
return diff;
}
}
return null;
}
/**
* Gets the 'Syntax length' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Syntax length' difference between the two AttributeType Objects
*/
private static PropertyDifference getSyntaxLengthDifference( AttributeType at1, AttributeType at2 )
{
long at1SyntaxLength = at1.getSyntaxLength();
long at2SyntaxLength = at2.getSyntaxLength();
if ( ( at1SyntaxLength == 0 ) && ( at2SyntaxLength != 0 ) )
{
PropertyDifference diff = new SyntaxLengthDifference( at1, at2, DifferenceType.ADDED );
diff.setNewValue( at2SyntaxLength );
return diff;
}
else if ( ( at1SyntaxLength != 0 ) && ( at2SyntaxLength == 0 ) )
{
PropertyDifference diff = new SyntaxLengthDifference( at1, at2, DifferenceType.REMOVED );
diff.setOldValue( at1SyntaxLength );
return diff;
}
else if ( ( at1SyntaxLength != 0 ) && ( at2SyntaxLength != 0 ) )
{
if ( at1SyntaxLength != at2SyntaxLength )
{
PropertyDifference diff = new SyntaxLengthDifference( at1, at2, DifferenceType.MODIFIED );
diff.setOldValue( at1SyntaxLength );
diff.setNewValue( at2SyntaxLength );
return diff;
}
}
return null;
}
/**
* Gets the 'Single value' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Single value' difference between the two AttributeType Objects
*/
private static PropertyDifference getSingleValueDifference( AttributeType at1, AttributeType at2 )
{
boolean at1SingleValued = at1.isSingleValued();
boolean at2SingleValued = at2.isSingleValued();
if ( at1SingleValued != at2SingleValued )
{
PropertyDifference diff = new SingleValueDifference( at1, at2 );
diff.setOldValue( at1SingleValued );
diff.setNewValue( at2SingleValued );
return diff;
}
return null;
}
/**
* Gets the 'Collective' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Collective' difference between the two AttributeType Objects
*/
private static PropertyDifference getCollectiveDifference( AttributeType at1, AttributeType at2 )
{
boolean at1Collective = at1.isCollective();
boolean at2Collective = at2.isCollective();
if ( at1Collective != at2Collective )
{
PropertyDifference diff = new CollectiveDifference( at1, at2 );
diff.setOldValue( at1Collective );
diff.setNewValue( at2Collective );
return diff;
}
return null;
}
/**
* Gets the 'No user modification' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'No user modification' difference between the two AttributeType Objects
*/
private static PropertyDifference getNoUserModificationDifference( AttributeType at1, AttributeType at2 )
{
boolean at1IsUserModifiable = at1.isUserModifiable();
boolean at2IsUserModifiable = at2.isUserModifiable();
if ( at1IsUserModifiable != at2IsUserModifiable )
{
PropertyDifference diff = new NoUserModificationDifference( at1, at2 );
diff.setOldValue( at1IsUserModifiable );
diff.setNewValue( at2IsUserModifiable );
return diff;
}
return null;
}
/**
* Gets the 'Equality' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Equality' difference between the two AttributeType Objects
*/
private static PropertyDifference getEqualityDifference( AttributeType at1, AttributeType at2 )
{
String at1Equality = at1.getEqualityOid();
String at2Equality = at2.getEqualityOid();
if ( ( at1Equality == null ) && ( at2Equality != null ) )
{
PropertyDifference diff = new EqualityDifference( at1, at2, DifferenceType.ADDED );
diff.setNewValue( at2Equality );
return diff;
}
else if ( ( at1Equality != null ) && ( at2Equality == null ) )
{
PropertyDifference diff = new EqualityDifference( at1, at2, DifferenceType.REMOVED );
diff.setOldValue( at1Equality );
return diff;
}
else if ( ( at1Equality != null ) && ( at2Equality != null ) )
{
if ( !at1Equality.equals( at2Equality ) )
{
PropertyDifference diff = new EqualityDifference( at1, at2, DifferenceType.MODIFIED );
diff.setOldValue( at1Equality );
diff.setNewValue( at2Equality );
return diff;
}
}
return null;
}
/**
* Gets the 'Ordering' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Ordering' difference between the two AttributeType Objects
*/
private static PropertyDifference getOrderingDifference( AttributeType at1, AttributeType at2 )
{
String at1Ordering = at1.getOrderingOid();
String at2Ordering = at2.getOrderingOid();
if ( ( at1Ordering == null ) && ( at2Ordering != null ) )
{
PropertyDifference diff = new OrderingDifference( at1, at2, DifferenceType.ADDED );
diff.setNewValue( at2Ordering );
return diff;
}
else if ( ( at1Ordering != null ) && ( at2Ordering == null ) )
{
PropertyDifference diff = new OrderingDifference( at1, at2, DifferenceType.REMOVED );
diff.setOldValue( at1Ordering );
return diff;
}
else if ( ( at1Ordering != null ) && ( at2Ordering != null ) )
{
if ( !at1Ordering.equals( at2Ordering ) )
{
PropertyDifference diff = new OrderingDifference( at1, at2, DifferenceType.MODIFIED );
diff.setOldValue( at1Ordering );
diff.setNewValue( at2Ordering );
return diff;
}
}
return null;
}
/**
* Gets the 'Substring' difference between the two AttributeType Objects.
*
* @param at1
* the source AttributeType Object
* @param at2
* the destination AttributeType Object
* @return
* the 'Substring' difference between the two AttributeType Objects
*/
private static PropertyDifference getSubstringDifference( AttributeType at1, AttributeType at2 )
{
String at1Substring = at1.getSubstringOid();
String at2Substring = at2.getSubstringOid();
if ( ( at1Substring == null ) && ( at2Substring != null ) )
{
PropertyDifference diff = new SubstringDifference( at1, at2, DifferenceType.ADDED );
diff.setNewValue( at2Substring );
return diff;
}
else if ( ( at1Substring != null ) && ( at2Substring == null ) )
{
PropertyDifference diff = new SubstringDifference( at1, at2, DifferenceType.REMOVED );
diff.setOldValue( at1Substring );
return diff;
}
else if ( ( at1Substring != null ) && ( at2Substring != null ) )
{
if ( !at1Substring.equals( at2Substring ) )
{
PropertyDifference diff = new SubstringDifference( at1, at2, DifferenceType.MODIFIED );
diff.setOldValue( at1Substring );
diff.setNewValue( at2Substring );
return diff;
}
}
return null;
}
}
|
googleapis/google-cloud-java | 36,330 | java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/CreateAlertPolicyRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/alert_service.proto
// Protobuf Java Version: 3.25.8
package com.google.monitoring.v3;
/**
*
*
* <pre>
* The protocol for the `CreateAlertPolicy` request.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateAlertPolicyRequest}
*/
public final class CreateAlertPolicyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.CreateAlertPolicyRequest)
CreateAlertPolicyRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use CreateAlertPolicyRequest.newBuilder() to construct.
  private CreateAlertPolicyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor: initializes the name field to the empty string.
  private CreateAlertPolicyRequest() {
    name_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateAlertPolicyRequest();
  }
  // Descriptor plumbing generated from google/monitoring/v3/alert_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.monitoring.v3.AlertServiceProto
        .internal_static_google_monitoring_v3_CreateAlertPolicyRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.monitoring.v3.AlertServiceProto
        .internal_static_google_monitoring_v3_CreateAlertPolicyRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.monitoring.v3.CreateAlertPolicyRequest.class,
            com.google.monitoring.v3.CreateAlertPolicyRequest.Builder.class);
  }
  // bitField0_ bit 0x00000001 records whether alert_policy has been set (see hasAlertPolicy()).
  private int bitField0_;
  public static final int NAME_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  // Holds either a String or a ByteString; decoded lazily by getName().
  private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) in which
* to create the alerting policy. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
*
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. |name| must be
* a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will
* return. The alerting policy that is returned will have a name that contains
* a normalized representation of this name as a prefix but adds a suffix of
* the form `/alertPolicies/[ALERT_POLICY_ID]`, identifying the policy in the
* container.
* </pre>
*
* <code>
* string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString form once and cache the String back into name_.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) in which
* to create the alerting policy. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
*
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. |name| must be
* a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will
* return. The alerting policy that is returned will have a name that contains
* a normalized representation of this name as a prefix but adds a suffix of
* the form `/alertPolicies/[ALERT_POLICY_ID]`, identifying the policy in the
* container.
* </pre>
*
* <code>
* string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String to UTF-8 once and cache the ByteString back into name_.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ALERT_POLICY_FIELD_NUMBER = 2;
  // null until set; accessors substitute the default instance (see getAlertPolicy()).
  private com.google.monitoring.v3.AlertPolicy alertPolicy_;
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the alertPolicy field is set.
*/
  @java.lang.Override
  public boolean hasAlertPolicy() {
    // Presence is tracked in bitField0_ bit 0x00000001.
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The alertPolicy.
*/
  @java.lang.Override
  public com.google.monitoring.v3.AlertPolicy getAlertPolicy() {
    // Never returns null: falls back to the default instance when unset.
    return alertPolicy_ == null
        ? com.google.monitoring.v3.AlertPolicy.getDefaultInstance()
        : alertPolicy_;
  }
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  @java.lang.Override
  public com.google.monitoring.v3.AlertPolicyOrBuilder getAlertPolicyOrBuilder() {
    // Same fallback as getAlertPolicy(): default instance when unset.
    return alertPolicy_ == null
        ? com.google.monitoring.v3.AlertPolicy.getDefaultInstance()
        : alertPolicy_;
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields to validate, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize alert_policy (field 2) only when explicitly set, name (field 3) only when non-empty.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getAlertPolicy());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, name_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed. Mirrors the field conditions in writeTo().
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getAlertPolicy());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, name_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.monitoring.v3.CreateAlertPolicyRequest)) {
      return super.equals(obj);
    }
    com.google.monitoring.v3.CreateAlertPolicyRequest other =
        (com.google.monitoring.v3.CreateAlertPolicyRequest) obj;

    // Field-by-field comparison; alert_policy values compared only when set on both sides.
    if (!getName().equals(other.getName())) return false;
    if (hasAlertPolicy() != other.hasAlertPolicy()) return false;
    if (hasAlertPolicy()) {
      if (!getAlertPolicy().equals(other.getAlertPolicy())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized: 0 means not yet computed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasAlertPolicy()) {
      hash = (37 * hash) + ALERT_POLICY_FIELD_NUMBER;
      hash = (53 * hash) + getAlertPolicy().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or the
  // GeneratedMessageV3 parse helpers.
  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.monitoring.v3.CreateAlertPolicyRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; a builder from the default instance is effectively fresh.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.monitoring.v3.CreateAlertPolicyRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The protocol for the `CreateAlertPolicy` request.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateAlertPolicyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.CreateAlertPolicyRequest)
com.google.monitoring.v3.CreateAlertPolicyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.CreateAlertPolicyRequest.class,
com.google.monitoring.v3.CreateAlertPolicyRequest.Builder.class);
}
// Construct using com.google.monitoring.v3.CreateAlertPolicyRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getAlertPolicyFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
alertPolicy_ = null;
if (alertPolicyBuilder_ != null) {
alertPolicyBuilder_.dispose();
alertPolicyBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.monitoring.v3.AlertServiceProto
.internal_static_google_monitoring_v3_CreateAlertPolicyRequest_descriptor;
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest getDefaultInstanceForType() {
return com.google.monitoring.v3.CreateAlertPolicyRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest build() {
com.google.monitoring.v3.CreateAlertPolicyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest buildPartial() {
com.google.monitoring.v3.CreateAlertPolicyRequest result =
new com.google.monitoring.v3.CreateAlertPolicyRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.monitoring.v3.CreateAlertPolicyRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.alertPolicy_ =
alertPolicyBuilder_ == null ? alertPolicy_ : alertPolicyBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.CreateAlertPolicyRequest) {
return mergeFrom((com.google.monitoring.v3.CreateAlertPolicyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.monitoring.v3.CreateAlertPolicyRequest other) {
if (other == com.google.monitoring.v3.CreateAlertPolicyRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasAlertPolicy()) {
mergeAlertPolicy(other.getAlertPolicy());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18:
{
input.readMessage(getAlertPolicyFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) in which
* to create the alerting policy. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
*
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. |name| must be
* a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will
* return. The alerting policy that is returned will have a name that contains
* a normalized representation of this name as a prefix but adds a suffix of
* the form `/alertPolicies/[ALERT_POLICY_ID]`, identifying the policy in the
* container.
* </pre>
*
* <code>
* string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) in which
* to create the alerting policy. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
*
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. |name| must be
* a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will
* return. The alerting policy that is returned will have a name that contains
* a normalized representation of this name as a prefix but adds a suffix of
* the form `/alertPolicies/[ALERT_POLICY_ID]`, identifying the policy in the
* container.
* </pre>
*
* <code>
* string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) in which
* to create the alerting policy. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
*
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. |name| must be
* a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will
* return. The alerting policy that is returned will have a name that contains
* a normalized representation of this name as a prefix but adds a suffix of
* the form `/alertPolicies/[ALERT_POLICY_ID]`, identifying the policy in the
* container.
* </pre>
*
* <code>
* string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) in which
* to create the alerting policy. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
*
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. |name| must be
* a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will
* return. The alerting policy that is returned will have a name that contains
* a normalized representation of this name as a prefix but adds a suffix of
* the form `/alertPolicies/[ALERT_POLICY_ID]`, identifying the policy in the
* container.
* </pre>
*
* <code>
* string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The
* [project](https://cloud.google.com/monitoring/api/v3#project_name) in which
* to create the alerting policy. The format is:
*
* projects/[PROJECT_ID_OR_NUMBER]
*
* Note that this field names the parent container in which the alerting
* policy will be written, not the name of the created policy. |name| must be
* a host project of a Metrics Scope, otherwise INVALID_ARGUMENT error will
* return. The alerting policy that is returned will have a name that contains
* a normalized representation of this name as a prefix but adds a suffix of
* the form `/alertPolicies/[ALERT_POLICY_ID]`, identifying the policy in the
* container.
* </pre>
*
* <code>
* string name = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.monitoring.v3.AlertPolicy alertPolicy_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.monitoring.v3.AlertPolicy,
com.google.monitoring.v3.AlertPolicy.Builder,
com.google.monitoring.v3.AlertPolicyOrBuilder>
alertPolicyBuilder_;
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the alertPolicy field is set.
*/
public boolean hasAlertPolicy() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The alertPolicy.
*/
public com.google.monitoring.v3.AlertPolicy getAlertPolicy() {
if (alertPolicyBuilder_ == null) {
return alertPolicy_ == null
? com.google.monitoring.v3.AlertPolicy.getDefaultInstance()
: alertPolicy_;
} else {
return alertPolicyBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAlertPolicy(com.google.monitoring.v3.AlertPolicy value) {
if (alertPolicyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
alertPolicy_ = value;
} else {
alertPolicyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setAlertPolicy(com.google.monitoring.v3.AlertPolicy.Builder builderForValue) {
if (alertPolicyBuilder_ == null) {
alertPolicy_ = builderForValue.build();
} else {
alertPolicyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeAlertPolicy(com.google.monitoring.v3.AlertPolicy value) {
if (alertPolicyBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& alertPolicy_ != null
&& alertPolicy_ != com.google.monitoring.v3.AlertPolicy.getDefaultInstance()) {
getAlertPolicyBuilder().mergeFrom(value);
} else {
alertPolicy_ = value;
}
} else {
alertPolicyBuilder_.mergeFrom(value);
}
if (alertPolicy_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearAlertPolicy() {
bitField0_ = (bitField0_ & ~0x00000002);
alertPolicy_ = null;
if (alertPolicyBuilder_ != null) {
alertPolicyBuilder_.dispose();
alertPolicyBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.monitoring.v3.AlertPolicy.Builder getAlertPolicyBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getAlertPolicyFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.monitoring.v3.AlertPolicyOrBuilder getAlertPolicyOrBuilder() {
if (alertPolicyBuilder_ != null) {
return alertPolicyBuilder_.getMessageOrBuilder();
} else {
return alertPolicy_ == null
? com.google.monitoring.v3.AlertPolicy.getDefaultInstance()
: alertPolicy_;
}
}
/**
*
*
* <pre>
* Required. The requested alerting policy. You should omit the `name` field
* in this policy. The name will be returned in the new policy, including a
* new `[ALERT_POLICY_ID]` value.
* </pre>
*
* <code>
* .google.monitoring.v3.AlertPolicy alert_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.monitoring.v3.AlertPolicy,
com.google.monitoring.v3.AlertPolicy.Builder,
com.google.monitoring.v3.AlertPolicyOrBuilder>
getAlertPolicyFieldBuilder() {
if (alertPolicyBuilder_ == null) {
alertPolicyBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.monitoring.v3.AlertPolicy,
com.google.monitoring.v3.AlertPolicy.Builder,
com.google.monitoring.v3.AlertPolicyOrBuilder>(
getAlertPolicy(), getParentForChildren(), isClean());
alertPolicy_ = null;
}
return alertPolicyBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.CreateAlertPolicyRequest)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateAlertPolicyRequest)
private static final com.google.monitoring.v3.CreateAlertPolicyRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.v3.CreateAlertPolicyRequest();
}
public static com.google.monitoring.v3.CreateAlertPolicyRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateAlertPolicyRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateAlertPolicyRequest>() {
@java.lang.Override
public CreateAlertPolicyRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateAlertPolicyRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateAlertPolicyRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.monitoring.v3.CreateAlertPolicyRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,178 | java-chat/proto-google-cloud-chat-v1/src/main/java/com/google/chat/v1/SearchSpacesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/chat/v1/space.proto
// Protobuf Java Version: 3.25.8
package com.google.chat.v1;
/**
*
*
* <pre>
* Response with a list of spaces corresponding to the search spaces request.
* </pre>
*
* Protobuf type {@code google.chat.v1.SearchSpacesResponse}
*/
public final class SearchSpacesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.chat.v1.SearchSpacesResponse)
SearchSpacesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchSpacesResponse.newBuilder() to construct.
private SearchSpacesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchSpacesResponse() {
spaces_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchSpacesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.chat.v1.SpaceProto
.internal_static_google_chat_v1_SearchSpacesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.chat.v1.SpaceProto
.internal_static_google_chat_v1_SearchSpacesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.chat.v1.SearchSpacesResponse.class,
com.google.chat.v1.SearchSpacesResponse.Builder.class);
}
public static final int SPACES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.chat.v1.Space> spaces_;
/**
*
*
* <pre>
* A page of the requested spaces.
* </pre>
*
* <code>repeated .google.chat.v1.Space spaces = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.chat.v1.Space> getSpacesList() {
return spaces_;
}
/**
*
*
* <pre>
* A page of the requested spaces.
* </pre>
*
* <code>repeated .google.chat.v1.Space spaces = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.chat.v1.SpaceOrBuilder> getSpacesOrBuilderList() {
return spaces_;
}
/**
*
*
* <pre>
* A page of the requested spaces.
* </pre>
*
* <code>repeated .google.chat.v1.Space spaces = 1;</code>
*/
@java.lang.Override
public int getSpacesCount() {
return spaces_.size();
}
/**
*
*
* <pre>
* A page of the requested spaces.
* </pre>
*
* <code>repeated .google.chat.v1.Space spaces = 1;</code>
*/
@java.lang.Override
public com.google.chat.v1.Space getSpaces(int index) {
return spaces_.get(index);
}
/**
*
*
* <pre>
* A page of the requested spaces.
* </pre>
*
* <code>repeated .google.chat.v1.Space spaces = 1;</code>
*/
@java.lang.Override
public com.google.chat.v1.SpaceOrBuilder getSpacesOrBuilder(int index) {
return spaces_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be used to retrieve the next page. If this field is empty,
* there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token that can be used to retrieve the next page. If this field is empty,
* there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TOTAL_SIZE_FIELD_NUMBER = 3;
private int totalSize_ = 0;
/**
*
*
* <pre>
* The total number of spaces that match the query, across all pages. If the
* result is over 10,000 spaces, this value is an estimate.
* </pre>
*
* <code>int32 total_size = 3;</code>
*
* @return The totalSize.
*/
@java.lang.Override
public int getTotalSize() {
return totalSize_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < spaces_.size(); i++) {
output.writeMessage(1, spaces_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
if (totalSize_ != 0) {
output.writeInt32(3, totalSize_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < spaces_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, spaces_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
if (totalSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, totalSize_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.chat.v1.SearchSpacesResponse)) {
return super.equals(obj);
}
com.google.chat.v1.SearchSpacesResponse other = (com.google.chat.v1.SearchSpacesResponse) obj;
if (!getSpacesList().equals(other.getSpacesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (getTotalSize() != other.getTotalSize()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSpacesCount() > 0) {
hash = (37 * hash) + SPACES_FIELD_NUMBER;
hash = (53 * hash) + getSpacesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (37 * hash) + TOTAL_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getTotalSize();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.chat.v1.SearchSpacesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.chat.v1.SearchSpacesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.chat.v1.SearchSpacesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.chat.v1.SearchSpacesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * Response with a list of spaces corresponding to the search spaces request.
   * </pre>
   *
   * Protobuf type {@code google.chat.v1.SearchSpacesResponse}
   */
  // NOTE(review): machine-generated by the protocol buffer compiler. Do not hand-edit;
  // changes are lost on regeneration — modify the .proto definition instead.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.chat.v1.SearchSpacesResponse)
      com.google.chat.v1.SearchSpacesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.chat.v1.SpaceProto
          .internal_static_google_chat_v1_SearchSpacesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.chat.v1.SpaceProto
          .internal_static_google_chat_v1_SearchSpacesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.chat.v1.SearchSpacesResponse.class,
              com.google.chat.v1.SearchSpacesResponse.Builder.class);
    }
    // Construct using com.google.chat.v1.SearchSpacesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets every field (spaces, next_page_token, total_size) to its default. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (spacesBuilder_ == null) {
        spaces_ = java.util.Collections.emptyList();
      } else {
        spaces_ = null;
        spacesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      totalSize_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.chat.v1.SpaceProto
          .internal_static_google_chat_v1_SearchSpacesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.chat.v1.SearchSpacesResponse getDefaultInstanceForType() {
      return com.google.chat.v1.SearchSpacesResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.chat.v1.SearchSpacesResponse build() {
      com.google.chat.v1.SearchSpacesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.chat.v1.SearchSpacesResponse buildPartial() {
      com.google.chat.v1.SearchSpacesResponse result =
          new com.google.chat.v1.SearchSpacesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Hands the repeated `spaces` field to the result: an unmodifiable snapshot of the
    // plain list, or the nested builder's built messages, depending on representation.
    private void buildPartialRepeatedFields(com.google.chat.v1.SearchSpacesResponse result) {
      if (spacesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          spaces_ = java.util.Collections.unmodifiableList(spaces_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.spaces_ = spaces_;
      } else {
        result.spaces_ = spacesBuilder_.build();
      }
    }
    // Copies only the scalar fields whose presence bits are set.
    private void buildPartial0(com.google.chat.v1.SearchSpacesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.totalSize_ = totalSize_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.chat.v1.SearchSpacesResponse) {
        return mergeFrom((com.google.chat.v1.SearchSpacesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: appends `other`'s spaces, overwrites next_page_token and
    // total_size when set in `other`. Merging the default instance is a no-op.
    public Builder mergeFrom(com.google.chat.v1.SearchSpacesResponse other) {
      if (other == com.google.chat.v1.SearchSpacesResponse.getDefaultInstance()) return this;
      if (spacesBuilder_ == null) {
        if (!other.spaces_.isEmpty()) {
          if (spaces_.isEmpty()) {
            spaces_ = other.spaces_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureSpacesIsMutable();
            spaces_.addAll(other.spaces_);
          }
          onChanged();
        }
      } else {
        if (!other.spaces_.isEmpty()) {
          if (spacesBuilder_.isEmpty()) {
            spacesBuilder_.dispose();
            spacesBuilder_ = null;
            spaces_ = other.spaces_;
            bitField0_ = (bitField0_ & ~0x00000001);
            spacesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSpacesFieldBuilder()
                    : null;
          } else {
            spacesBuilder_.addAllMessages(other.spaces_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getTotalSize() != 0) {
        setTotalSize(other.getTotalSize());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: dispatches on field tags (10 = spaces, 18 = next_page_token,
    // 24 = total_size); unknown fields are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.chat.v1.Space m =
                    input.readMessage(com.google.chat.v1.Space.parser(), extensionRegistry);
                if (spacesBuilder_ == null) {
                  ensureSpacesIsMutable();
                  spaces_.add(m);
                } else {
                  spacesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                totalSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence/state bits: bit 0 = spaces_ list is a builder-owned mutable copy,
    // bit 1 = nextPageToken_ set, bit 2 = totalSize_ set.
    private int bitField0_;
    private java.util.List<com.google.chat.v1.Space> spaces_ = java.util.Collections.emptyList();
    // Copy-on-write: take a private mutable copy of spaces_ before the first in-place edit.
    private void ensureSpacesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        spaces_ = new java.util.ArrayList<com.google.chat.v1.Space>(spaces_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.chat.v1.Space,
            com.google.chat.v1.Space.Builder,
            com.google.chat.v1.SpaceOrBuilder>
        spacesBuilder_;
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public java.util.List<com.google.chat.v1.Space> getSpacesList() {
      if (spacesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(spaces_);
      } else {
        return spacesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public int getSpacesCount() {
      if (spacesBuilder_ == null) {
        return spaces_.size();
      } else {
        return spacesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public com.google.chat.v1.Space getSpaces(int index) {
      if (spacesBuilder_ == null) {
        return spaces_.get(index);
      } else {
        return spacesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder setSpaces(int index, com.google.chat.v1.Space value) {
      if (spacesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpacesIsMutable();
        spaces_.set(index, value);
        onChanged();
      } else {
        spacesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder setSpaces(int index, com.google.chat.v1.Space.Builder builderForValue) {
      if (spacesBuilder_ == null) {
        ensureSpacesIsMutable();
        spaces_.set(index, builderForValue.build());
        onChanged();
      } else {
        spacesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder addSpaces(com.google.chat.v1.Space value) {
      if (spacesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpacesIsMutable();
        spaces_.add(value);
        onChanged();
      } else {
        spacesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder addSpaces(int index, com.google.chat.v1.Space value) {
      if (spacesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSpacesIsMutable();
        spaces_.add(index, value);
        onChanged();
      } else {
        spacesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder addSpaces(com.google.chat.v1.Space.Builder builderForValue) {
      if (spacesBuilder_ == null) {
        ensureSpacesIsMutable();
        spaces_.add(builderForValue.build());
        onChanged();
      } else {
        spacesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder addSpaces(int index, com.google.chat.v1.Space.Builder builderForValue) {
      if (spacesBuilder_ == null) {
        ensureSpacesIsMutable();
        spaces_.add(index, builderForValue.build());
        onChanged();
      } else {
        spacesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder addAllSpaces(java.lang.Iterable<? extends com.google.chat.v1.Space> values) {
      if (spacesBuilder_ == null) {
        ensureSpacesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, spaces_);
        onChanged();
      } else {
        spacesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder clearSpaces() {
      if (spacesBuilder_ == null) {
        spaces_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        spacesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public Builder removeSpaces(int index) {
      if (spacesBuilder_ == null) {
        ensureSpacesIsMutable();
        spaces_.remove(index);
        onChanged();
      } else {
        spacesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public com.google.chat.v1.Space.Builder getSpacesBuilder(int index) {
      return getSpacesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public com.google.chat.v1.SpaceOrBuilder getSpacesOrBuilder(int index) {
      if (spacesBuilder_ == null) {
        return spaces_.get(index);
      } else {
        return spacesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public java.util.List<? extends com.google.chat.v1.SpaceOrBuilder> getSpacesOrBuilderList() {
      if (spacesBuilder_ != null) {
        return spacesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(spaces_);
      }
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public com.google.chat.v1.Space.Builder addSpacesBuilder() {
      return getSpacesFieldBuilder().addBuilder(com.google.chat.v1.Space.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public com.google.chat.v1.Space.Builder addSpacesBuilder(int index) {
      return getSpacesFieldBuilder()
          .addBuilder(index, com.google.chat.v1.Space.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * A page of the requested spaces.
     * </pre>
     *
     * <code>repeated .google.chat.v1.Space spaces = 1;</code>
     */
    public java.util.List<com.google.chat.v1.Space.Builder> getSpacesBuilderList() {
      return getSpacesFieldBuilder().getBuilderList();
    }
    // Lazily switches the repeated field from the plain-list representation to the
    // nested-builder representation; spaces_ is nulled out once the builder owns the data.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.chat.v1.Space,
            com.google.chat.v1.Space.Builder,
            com.google.chat.v1.SpaceOrBuilder>
        getSpacesFieldBuilder() {
      if (spacesBuilder_ == null) {
        spacesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.chat.v1.Space,
                com.google.chat.v1.Space.Builder,
                com.google.chat.v1.SpaceOrBuilder>(
                spaces_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        spaces_ = null;
      }
      return spacesBuilder_;
    }
    // Holds either a String or a lazily-decoded ByteString (see getNextPageToken).
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token that can be used to retrieve the next page. If this field is empty,
     * there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token that can be used to retrieve the next page. If this field is empty,
     * there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token that can be used to retrieve the next page. If this field is empty,
     * there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token that can be used to retrieve the next page. If this field is empty,
     * there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token that can be used to retrieve the next page. If this field is empty,
     * there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    private int totalSize_;
    /**
     *
     *
     * <pre>
     * The total number of spaces that match the query, across all pages. If the
     * result is over 10,000 spaces, this value is an estimate.
     * </pre>
     *
     * <code>int32 total_size = 3;</code>
     *
     * @return The totalSize.
     */
    @java.lang.Override
    public int getTotalSize() {
      return totalSize_;
    }
    /**
     *
     *
     * <pre>
     * The total number of spaces that match the query, across all pages. If the
     * result is over 10,000 spaces, this value is an estimate.
     * </pre>
     *
     * <code>int32 total_size = 3;</code>
     *
     * @param value The totalSize to set.
     * @return This builder for chaining.
     */
    public Builder setTotalSize(int value) {
      totalSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The total number of spaces that match the query, across all pages. If the
     * result is over 10,000 spaces, this value is an estimate.
     * </pre>
     *
     * <code>int32 total_size = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTotalSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      totalSize_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.chat.v1.SearchSpacesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.chat.v1.SearchSpacesResponse)
  // Shared immutable default instance; also the identity value for Builder.mergeFrom().
  private static final com.google.chat.v1.SearchSpacesResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.chat.v1.SearchSpacesResponse();
  }
  public static com.google.chat.v1.SearchSpacesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; funnels input through a Builder so that parse failures can
  // attach the partially-built message for diagnostics.
  private static final com.google.protobuf.Parser<SearchSpacesResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchSpacesResponse>() {
        @java.lang.Override
        public SearchSpacesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<SearchSpacesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<SearchSpacesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.chat.v1.SearchSpacesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/phoenix | 36,426 | phoenix-core/src/test/java/org/apache/phoenix/expression/ArrayConcatFunctionTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.expression;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.SQLException;
import java.util.List;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.exception.DataExceedsCapacityException;
import org.apache.phoenix.expression.function.ArrayConcatFunction;
import org.apache.phoenix.schema.SortOrder;
import org.apache.phoenix.schema.TypeMismatchException;
import org.apache.phoenix.schema.types.PBoolean;
import org.apache.phoenix.schema.types.PBooleanArray;
import org.apache.phoenix.schema.types.PChar;
import org.apache.phoenix.schema.types.PCharArray;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PDate;
import org.apache.phoenix.schema.types.PDateArray;
import org.apache.phoenix.schema.types.PDecimal;
import org.apache.phoenix.schema.types.PDecimalArray;
import org.apache.phoenix.schema.types.PDouble;
import org.apache.phoenix.schema.types.PDoubleArray;
import org.apache.phoenix.schema.types.PFloat;
import org.apache.phoenix.schema.types.PFloatArray;
import org.apache.phoenix.schema.types.PInteger;
import org.apache.phoenix.schema.types.PIntegerArray;
import org.apache.phoenix.schema.types.PLong;
import org.apache.phoenix.schema.types.PLongArray;
import org.apache.phoenix.schema.types.PSmallint;
import org.apache.phoenix.schema.types.PSmallintArray;
import org.apache.phoenix.schema.types.PTinyint;
import org.apache.phoenix.schema.types.PTinyintArray;
import org.apache.phoenix.schema.types.PVarchar;
import org.apache.phoenix.schema.types.PVarcharArray;
import org.apache.phoenix.schema.types.PhoenixArray;
import org.junit.Test;
import org.apache.phoenix.thirdparty.com.google.common.collect.Lists;
public class ArrayConcatFunctionTest {
private static void testExpression(LiteralExpression array1, LiteralExpression array2,
PhoenixArray expected) throws SQLException {
List<Expression> expressions = Lists.newArrayList((Expression) array1);
expressions.add(array2);
Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
ImmutableBytesWritable ptr = new ImmutableBytesWritable();
arrayConcatFunction.evaluate(null, ptr);
PhoenixArray result = (PhoenixArray) arrayConcatFunction.getDataType().toObject(ptr,
expressions.get(0).getSortOrder(), array1.getMaxLength(), array1.getScale());
assertEquals(expected, result);
}
private static void test(PhoenixArray array1, PhoenixArray array2, PDataType array1DataType,
Integer arr1MaxLen, Integer arr1Scale, PDataType array2DataType, Integer arr2MaxLen,
Integer arr2Scale, PhoenixArray expected, SortOrder array1SortOrder, SortOrder array2SortOrder)
throws SQLException {
LiteralExpression array1Literal, array2Literal;
array1Literal = LiteralExpression.newConstant(array1, array1DataType, arr1MaxLen, arr1Scale,
array1SortOrder, Determinism.ALWAYS);
array2Literal = LiteralExpression.newConstant(array2, array2DataType, arr2MaxLen, arr2Scale,
array2SortOrder, Determinism.ALWAYS);
testExpression(array1Literal, array2Literal, expected);
}
@Test
public void testChar1() throws SQLException {
Object[] o1 = new Object[] { "aa", "bb" };
Object[] o2 = new Object[] { "c", "d" };
Object[] e = new Object[] { "aa", "bb", "c", "d" };
PDataType type = PCharArray.INSTANCE;
PDataType base = PChar.INSTANCE;
PhoenixArray arr1 = new PhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray(base, e);
test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.DESC, SortOrder.ASC);
}
@Test
public void testChar2() throws SQLException {
Object[] o1 = new Object[] { "aa", "bb" };
Object[] o2 = new Object[] { "cc", "dc", "ee" };
Object[] e = new Object[] { "aa", "bb", "cc", "dc", "ee" };
PDataType type = PCharArray.INSTANCE;
PDataType base = PChar.INSTANCE;
PhoenixArray arr1 = new PhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray(base, e);
test(arr1, arr2, type, 2, null, type, 2, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, 2, null, type, 2, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, 2, null, type, 2, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, 2, null, type, 2, null, expected, SortOrder.DESC, SortOrder.ASC);
}
  // Negative test: the second array's CHAR(2) elements exceed the declared
  // max length (1) of the first array's type, so concatenation must fail with
  // DataExceedsCapacityException. NOTE(review): the first test() call is expected
  // to throw, so the remaining three calls (and the `expected` array) are never
  // actually exercised — TODO confirm whether all four combinations should be
  // split into separate expected-exception tests.
  @Test(expected = DataExceedsCapacityException.class)
  public void testChar3() throws SQLException {
    Object[] o1 = new Object[] { "c", "d" };
    Object[] o2 = new Object[] { "aa", "bb" };
    Object[] e = new Object[] { "aa", "bb", "c", "d" };
    PDataType type = PCharArray.INSTANCE;
    PDataType base = PChar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, 2, null, type, 1, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
@Test
public void testInt1() throws SQLException {
Object[] o1 = new Object[] { 1, 2 };
Object[] o2 = new Object[] { 5, 6, 7 };
Object[] e = new Object[] { 1, 2, 5, 6, 7 };
PDataType type = PIntegerArray.INSTANCE;
PDataType base = PInteger.INSTANCE;
PhoenixArray arr1 = new PhoenixArray.PrimitiveIntPhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray.PrimitiveIntPhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray.PrimitiveIntPhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
}
@Test
public void testFloat1() throws SQLException {
Object[] o1 = new Object[] { (float) 1.2, (float) 2 };
Object[] o2 = new Object[] { (float) 5, (float) 6, (float) 7 };
Object[] e = new Object[] { (float) 1.2, (float) 2, (float) 5, (float) 6, (float) 7 };
PDataType type = PFloatArray.INSTANCE;
PDataType base = PFloat.INSTANCE;
PhoenixArray arr1 = new PhoenixArray.PrimitiveFloatPhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray.PrimitiveFloatPhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray.PrimitiveFloatPhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
}
@Test
public void testDouble1() throws SQLException {
Object[] o1 = new Object[] { (double) 1.2, (double) 2 };
Object[] o2 = new Object[] { (double) 5.2, (double) 6, (double) 7 };
Object[] e = new Object[] { (double) 1.2, (double) 2, (double) 5.2, (double) 6, (double) 7 };
PDataType type = PDoubleArray.INSTANCE;
PDataType base = PDouble.INSTANCE;
PhoenixArray arr1 = new PhoenixArray.PrimitiveDoublePhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray.PrimitiveDoublePhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray.PrimitiveDoublePhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
}
@Test
public void testLong1() throws SQLException {
Object[] o1 = new Object[] { (long) 1, (long) 2 };
Object[] o2 = new Object[] { (long) 5, (long) 6, (long) 7 };
Object[] e = new Object[] { (long) 1, (long) 2, (long) 5, (long) 6, (long) 7 };
PDataType type = PLongArray.INSTANCE;
PDataType base = PLong.INSTANCE;
PhoenixArray arr1 = new PhoenixArray.PrimitiveLongPhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray.PrimitiveLongPhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray.PrimitiveLongPhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
}
@Test
public void testShort1() throws SQLException {
Object[] o1 = new Object[] { (short) 1, (short) 2 };
Object[] o2 = new Object[] { (short) 5, (short) 6, (short) 7 };
Object[] e = new Object[] { (short) 1, (short) 2, (short) 5, (short) 6, (short) 7 };
PDataType type = PSmallintArray.INSTANCE;
PDataType base = PSmallint.INSTANCE;
PhoenixArray arr1 = new PhoenixArray.PrimitiveShortPhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray.PrimitiveShortPhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray.PrimitiveShortPhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
}
@Test
public void testBoolean1() throws SQLException {
Object[] o1 = new Object[] { true, true };
Object[] o2 = new Object[] { false, false, false };
Object[] e = new Object[] { true, true, false, false, false };
PDataType type = PBooleanArray.INSTANCE;
PDataType base = PBoolean.INSTANCE;
PhoenixArray arr1 = new PhoenixArray.PrimitiveBooleanPhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray.PrimitiveBooleanPhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray.PrimitiveBooleanPhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
}
@Test
public void testTinyInt1() throws SQLException {
Object[] o1 = new Object[] { (byte) 2, (byte) 2 };
Object[] o2 = new Object[] { (byte) 5, (byte) 6, (byte) 7 };
Object[] e = new Object[] { (byte) 2, (byte) 2, (byte) 5, (byte) 6, (byte) 7 };
PDataType type = PTinyintArray.INSTANCE;
PDataType base = PTinyint.INSTANCE;
PhoenixArray arr1 = new PhoenixArray.PrimitiveBytePhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray.PrimitiveBytePhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray.PrimitiveBytePhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
}
@Test
public void testDate1() throws SQLException {
Object[] o1 = new Object[] { new Date(0l), new Date(0l) };
Object[] o2 = new Object[] { new Date(0l), new Date(0l), new Date(0l) };
Object[] e =
new Object[] { new Date(0l), new Date(0l), new Date(0l), new Date(0l), new Date(0l) };
PDataType type = PDateArray.INSTANCE;
PDataType base = PDate.INSTANCE;
PhoenixArray arr1 = new PhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
}
@Test
public void testDecimal1() throws SQLException {
Object[] o1 = new Object[] { BigDecimal.valueOf(32.4), BigDecimal.valueOf(34) };
Object[] o2 = new Object[] { BigDecimal.valueOf(32.4), BigDecimal.valueOf(34) };
Object[] e = new Object[] { BigDecimal.valueOf(32.4), BigDecimal.valueOf(34),
BigDecimal.valueOf(32.4), BigDecimal.valueOf(34) };
PDataType type = PDecimalArray.INSTANCE;
PDataType base = PDecimal.INSTANCE;
PhoenixArray arr1 = new PhoenixArray(base, o1);
PhoenixArray arr2 = new PhoenixArray(base, o2);
PhoenixArray expected = new PhoenixArray(base, e);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
}
  // Two non-null two-element arrays -> simple four-element concatenation.
  @Test
  public void testVarchar1() throws SQLException {
    Object[] o1 = new Object[] { "a", "b" };
    Object[] o2 = new Object[] { "c", "d" };
    Object[] e = new Object[] { "a", "b", "c", "d" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    // Expected result is the same for all four combinations of operand sort orders.
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Left operand shorter (one element) than the right (two elements).
  @Test
  public void testVarchar2() throws SQLException {
    Object[] o1 = new Object[] { "a" };
    Object[] o2 = new Object[] { "c", "d" };
    Object[] e = new Object[] { "a", "c", "d" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Right operand shorter (one element) than the left (two elements).
  @Test
  public void testVarchar3() throws SQLException {
    Object[] o1 = new Object[] { "a", "b" };
    Object[] o2 = new Object[] { "c" };
    Object[] e = new Object[] { "a", "b", "c" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Null element at the start of the right operand must be preserved in place.
  @Test
  public void testVarchar4() throws SQLException {
    Object[] o1 = new Object[] { "a" };
    Object[] o2 = new Object[] { null, "c" };
    Object[] e = new Object[] { "a", null, "c" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Trailing nulls on the left meet leading nulls on the right; the run of four
  // nulls across the seam must survive concatenation intact.
  @Test
  public void testVarchar5() throws SQLException {
    Object[] o1 = new Object[] { "a", null, null };
    Object[] o2 = new Object[] { null, null, "c" };
    Object[] e = new Object[] { "a", null, null, null, null, "c" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // A null right operand: concatenation returns the left operand unchanged.
  @Test
  public void testVarchar6() throws SQLException {
    Object[] o1 = new Object[] { "a", "b" };
    Object[] e = new Object[] { "a", "b" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = null;
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // A null left operand: concatenation returns the right operand unchanged.
  @Test
  public void testVarchar7() throws SQLException {
    Object[] o2 = new Object[] { "a", "b" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = null;
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = arr2;
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Nulls interleaved with values on both sides; every null position must be kept.
  @Test
  public void testVarchar8() throws SQLException {
    Object[] o1 = new Object[] { "a", null, null, "b" };
    Object[] o2 = new Object[] { "c", null, "d", null, "e" };
    Object[] e = new Object[] { "a", null, null, "b", "c", null, "d", null, "e" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Concatenating arrays of different element types (varchar + integer) must fail
  // with TypeMismatchException (declared via the @Test "expected" attribute).
  @Test(expected = TypeMismatchException.class)
  public void testVarchar9() throws SQLException {
    Object[] o1 = new Object[] { "a", "b" };
    Object[] o2 = new Object[] { 1, 2 };
    PhoenixArray arr1 = new PhoenixArray(PVarchar.INSTANCE, o1);
    PhoenixArray arr2 = new PhoenixArray.PrimitiveIntPhoenixArray(PInteger.INSTANCE, o2);
    test(arr1, arr2, PVarcharArray.INSTANCE, null, null, PIntegerArray.INSTANCE, null, null, null,
      SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, PVarcharArray.INSTANCE, null, null, PIntegerArray.INSTANCE, null, null, null,
      SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, PVarcharArray.INSTANCE, null, null, PIntegerArray.INSTANCE, null, null, null,
      SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, PVarcharArray.INSTANCE, null, null, PIntegerArray.INSTANCE, null, null, null,
      SortOrder.DESC, SortOrder.ASC);
  }
  // Arrays larger than Short.MAX_VALUE elements: per the test names these sizes
  // straddle the point where the serialized array's offset region switches from
  // short to int offsets — TODO confirm against PArrayDataType's encoding.
  @Test
  public void testWithIntOffsetArray() throws SQLException {
    Object[] o1 = new Object[Short.MAX_VALUE + 7];
    Object[] o2 = new Object[] { "b", "b" };
    Object[] e = new Object[Short.MAX_VALUE + 9];
    for (int i = 0; i < o1.length; i++) {
      o1[i] = "a";
      e[i] = "a";
    }
    e[Short.MAX_VALUE + 7] = "b";
    e[Short.MAX_VALUE + 8] = "b";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Left operand exactly Short.MAX_VALUE + 1 elements: concatenation itself pushes
  // the result across the short-offset limit.
  @Test
  public void testWithShortToIntOffsetArray() throws SQLException {
    Object[] o1 = new Object[Short.MAX_VALUE + 1];
    Object[] o2 = new Object[] { "b", "b" };
    Object[] e = new Object[Short.MAX_VALUE + 3];
    for (int i = 0; i < o1.length; i++) {
      o1[i] = "a";
      e[i] = "a";
    }
    e[Short.MAX_VALUE + 2] = "b";
    e[Short.MAX_VALUE + 1] = "b";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Same boundary as above, with a null element appearing right at the seam.
  @Test
  public void testWithShortToIntOffsetArray2() throws SQLException {
    Object[] o1 = new Object[Short.MAX_VALUE + 1];
    Object[] o2 = new Object[] { null, "b" };
    Object[] e = new Object[Short.MAX_VALUE + 3];
    for (int i = 0; i < o1.length; i++) {
      o1[i] = "a";
      e[i] = "a";
    }
    e[Short.MAX_VALUE + 1] = null;
    e[Short.MAX_VALUE + 2] = "b";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // The following six tests concatenate arrays that are almost entirely nulls,
  // with sizes chosen so the combined null run lands on either side of 255/256.
  // Presumably this exercises the serialized null-run count encoding boundary —
  // TODO confirm against PArrayDataType's null-repetition encoding.
  @Test
  public void testWith10NullsAnd246Nulls() throws SQLException {
    Object[] o1 = new Object[11];
    Object[] o2 = new Object[247];
    Object[] e = new Object[258];
    o1[0] = "a";
    o2[o2.length - 1] = "a";
    e[e.length - 1] = "a";
    e[0] = "a";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // 0 trailing nulls on the left, 256 leading nulls on the right.
  @Test
  public void testWith0NullsAnd256Nulls() throws SQLException {
    Object[] o1 = new Object[1];
    Object[] o2 = new Object[257];
    Object[] e = new Object[258];
    o1[0] = "a";
    o2[o2.length - 1] = "a";
    e[e.length - 1] = "a";
    e[0] = "a";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // 256 trailing nulls on the left, 0 leading nulls on the right.
  @Test
  public void testWith256NullsAnd0Nulls() throws SQLException {
    Object[] o1 = new Object[257];
    Object[] o2 = new Object[1];
    Object[] e = new Object[258];
    o1[0] = "a";
    o2[o2.length - 1] = "a";
    e[e.length - 1] = "a";
    e[0] = "a";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // 255 trailing nulls on the left — one below the boundary of the test above.
  @Test
  public void testWith255NullsAnd0Nulls() throws SQLException {
    Object[] o1 = new Object[256];
    Object[] o2 = new Object[1];
    Object[] e = new Object[257];
    o1[0] = "a";
    o2[o2.length - 1] = "a";
    e[e.length - 1] = "a";
    e[0] = "a";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // 255 leading nulls on the right — mirror of the test above.
  @Test
  public void testWith0NullsAnd255Nulls() throws SQLException {
    Object[] o1 = new Object[1];
    Object[] o2 = new Object[256];
    Object[] e = new Object[257];
    o1[0] = "a";
    o2[o2.length - 1] = "a";
    e[e.length - 1] = "a";
    e[0] = "a";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // 10 + 245 nulls -> combined run of 255, just below the boundary.
  @Test
  public void testWith10NullsAnd245Nulls() throws SQLException {
    Object[] o1 = new Object[11];
    Object[] o2 = new Object[246];
    Object[] e = new Object[257];
    o1[0] = "a";
    o2[o2.length - 1] = "a";
    e[e.length - 1] = "a";
    e[0] = "a";
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    PhoenixArray expected = new PhoenixArray(base, e);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.ASC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.ASC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.DESC);
    test(arr1, arr2, type, null, null, type, null, null, expected, SortOrder.DESC, SortOrder.ASC);
  }
  // Checks the exact serialized bytes of the concatenation result when both
  // operands are ASC: element bytes, separators, offset region and trailer must
  // all match the hard-coded expectation.
  @Test
  public void testForCorrectSeparatorBytes1() throws Exception {
    Object[] o1 = new Object[] { "a", "b" };
    Object[] o2 = new Object[] { "c", "d", "e" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    LiteralExpression array1Literal, array2Literal;
    array1Literal =
      LiteralExpression.newConstant(arr1, type, null, null, SortOrder.ASC, Determinism.ALWAYS);
    array2Literal =
      LiteralExpression.newConstant(arr2, type, null, null, SortOrder.ASC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) array1Literal);
    expressions.add(array2Literal);
    Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayConcatFunction.evaluate(null, ptr);
    // 97,0 .. 101,0 are the null-terminated ASCII elements 'a'..'e'; the rest is
    // the serialized offset/trailer region of the array encoding.
    byte[] expected = new byte[] { 97, 0, 98, 0, 99, 0, 100, 0, 101, 0, 0, 0, -128, 1, -128, 3,
      -128, 5, -128, 7, -128, 9, 0, 0, 0, 12, 0, 0, 0, 5, 1 };
    assertArrayEquals(expected, ptr.get());
  }
  // ASC + DESC operands: expected bytes are identical to the all-ASC case,
  // i.e. the result takes the first operand's (ASC) representation.
  @Test
  public void testForCorrectSeparatorBytes2() throws Exception {
    Object[] o1 = new Object[] { "a", "b" };
    Object[] o2 = new Object[] { "c", "d", "e" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    LiteralExpression array1Literal, array2Literal;
    array1Literal =
      LiteralExpression.newConstant(arr1, type, null, null, SortOrder.ASC, Determinism.ALWAYS);
    array2Literal =
      LiteralExpression.newConstant(arr2, type, null, null, SortOrder.DESC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) array1Literal);
    expressions.add(array2Literal);
    Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayConcatFunction.evaluate(null, ptr);
    byte[] expected = new byte[] { 97, 0, 98, 0, 99, 0, 100, 0, 101, 0, 0, 0, -128, 1, -128, 3,
      -128, 5, -128, 7, -128, 9, 0, 0, 0, 12, 0, 0, 0, 5, 1 };
    assertArrayEquals(expected, ptr.get());
  }
  // DESC + DESC operands: element bytes are the bit-inverted ('a' -> -98) DESC
  // form, matching the first operand's sort order.
  @Test
  public void testForCorrectSeparatorBytes3() throws Exception {
    Object[] o1 = new Object[] { "a", "b" };
    Object[] o2 = new Object[] { "c", "d", "e" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    LiteralExpression array1Literal, array2Literal;
    array1Literal =
      LiteralExpression.newConstant(arr1, type, null, null, SortOrder.DESC, Determinism.ALWAYS);
    array2Literal =
      LiteralExpression.newConstant(arr2, type, null, null, SortOrder.DESC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) array1Literal);
    expressions.add(array2Literal);
    Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayConcatFunction.evaluate(null, ptr);
    byte[] expected = new byte[] { -98, -1, -99, -1, -100, -1, -101, -1, -102, -1, -1, -1, -128, 1,
      -128, 3, -128, 5, -128, 7, -128, 9, 0, 0, 0, 12, 0, 0, 0, 5, 1 };
    assertArrayEquals(expected, ptr.get());
  }
  // ASC + DESC with nulls at the seam: the run of two nulls is encoded as a
  // single separator pair (0, -2) in the expected bytes.
  @Test
  public void testForCorrectSeparatorBytes4() throws Exception {
    Object[] o1 = new Object[] { "a", "b", null };
    Object[] o2 = new Object[] { null, "c", "d", "e" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    LiteralExpression array1Literal, array2Literal;
    array1Literal =
      LiteralExpression.newConstant(arr1, type, null, null, SortOrder.ASC, Determinism.ALWAYS);
    array2Literal =
      LiteralExpression.newConstant(arr2, type, null, null, SortOrder.DESC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) array1Literal);
    expressions.add(array2Literal);
    Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayConcatFunction.evaluate(null, ptr);
    byte[] expected = new byte[] { 97, 0, 98, 0, 0, -2, 99, 0, 100, 0, 101, 0, 0, 0, -128, 1, -128,
      3, -128, 5, -128, 5, -128, 7, -128, 9, -128, 11, 0, 0, 0, 14, 0, 0, 0, 7, 1 };
    assertArrayEquals(expected, ptr.get());
  }
  // DESC + DESC with a three-null run across the seam: DESC element bytes plus
  // the (0, -3) null-run separator.
  @Test
  public void testForCorrectSeparatorBytes5() throws Exception {
    Object[] o1 = new Object[] { "a", "b", null, null };
    Object[] o2 = new Object[] { null, "c", "d", "e" };
    PDataType type = PVarcharArray.INSTANCE;
    PDataType base = PVarchar.INSTANCE;
    PhoenixArray arr1 = new PhoenixArray(base, o1);
    PhoenixArray arr2 = new PhoenixArray(base, o2);
    LiteralExpression array1Literal, array2Literal;
    array1Literal =
      LiteralExpression.newConstant(arr1, type, null, null, SortOrder.DESC, Determinism.ALWAYS);
    array2Literal =
      LiteralExpression.newConstant(arr2, type, null, null, SortOrder.DESC, Determinism.ALWAYS);
    List<Expression> expressions = Lists.newArrayList((Expression) array1Literal);
    expressions.add(array2Literal);
    Expression arrayConcatFunction = new ArrayConcatFunction(expressions);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    arrayConcatFunction.evaluate(null, ptr);
    byte[] expected =
      new byte[] { -98, -1, -99, -1, 0, -3, -100, -1, -101, -1, -102, -1, -1, -1, -128, 1, -128, 3,
        -128, 5, -128, 5, -128, 5, -128, 7, -128, 9, -128, 11, 0, 0, 0, 14, 0, 0, 0, 8, 1 };
    assertArrayEquals(expected, ptr.get());
  }
}
|
googleapis/google-cloud-java | 36,354 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/GetFeatureRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/featurestore_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for
* [FeaturestoreService.GetFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetFeature].
* Request message for
* [FeatureRegistryService.GetFeature][google.cloud.aiplatform.v1beta1.FeatureRegistryService.GetFeature].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.GetFeatureRequest}
*/
public final class GetFeatureRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.GetFeatureRequest)
GetFeatureRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // NOTE(review): generated protobuf code — do not hand-edit; regenerate from the
  // .proto definition instead.
  // Use GetFeatureRequest.newBuilder() to construct.
  private GetFeatureRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance constructor: initializes name to the empty string.
  private GetFeatureRequest() {
    name_ = "";
  }
  // Used by the protobuf runtime to create fresh instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new GetFeatureRequest();
  }
  // Descriptor for this message type, from the featurestore_service proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_GetFeatureRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_GetFeatureRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.GetFeatureRequest.class,
            com.google.cloud.aiplatform.v1beta1.GetFeatureRequest.Builder.class);
  }
  // Bit 0 tracks presence of the optional feature_stats_and_anomaly_spec field
  // (see hasFeatureStatsAndAnomalySpec()/writeTo()).
  private int bitField0_;
  public static final int NAME_FIELD_NUMBER = 1;
  // Holds a ByteString until first String access, then caches the decoded String
  // (see getName()/getNameBytes() below).
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the Feature resource.
   * Format for entity_type as parent:
   * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
   * Format for feature_group as parent:
   * `projects/{project}/locations/{location}/featureGroups/{feature_group}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Feature resource.
   * Format for entity_type as parent:
   * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
   * Format for feature_group as parent:
   * `projects/{project}/locations/{location}/featureGroups/{feature_group}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FEATURE_STATS_AND_ANOMALY_SPEC_FIELD_NUMBER = 3;
  // Null when unset; presence is tracked by bitField0_ bit 0.
  private com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec
      featureStatsAndAnomalySpec_;
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the featureStatsAndAnomalySpec field is set.
   */
  @java.lang.Override
  public boolean hasFeatureStatsAndAnomalySpec() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The featureStatsAndAnomalySpec.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec
      getFeatureStatsAndAnomalySpec() {
    // Falls back to the default instance when the field was never set.
    return featureStatsAndAnomalySpec_ == null
        ? com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.getDefaultInstance()
        : featureStatsAndAnomalySpec_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpecOrBuilder
      getFeatureStatsAndAnomalySpecOrBuilder() {
    return featureStatsAndAnomalySpec_ == null
        ? com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.getDefaultInstance()
        : featureStatsAndAnomalySpec_;
  }
  // Caches the isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; empty name is skipped entirely.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getFeatureStatsAndAnomalySpec());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the wire size of this message.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              3, getFeatureStatsAndAnomalySpec());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including unknown fields carried by the message.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.GetFeatureRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.GetFeatureRequest other =
        (com.google.cloud.aiplatform.v1beta1.GetFeatureRequest) obj;
    if (!getName().equals(other.getName())) return false;
    if (hasFeatureStatsAndAnomalySpec() != other.hasFeatureStatsAndAnomalySpec()) return false;
    if (hasFeatureStatsAndAnomalySpec()) {
      if (!getFeatureStatsAndAnomalySpec().equals(other.getFeatureStatsAndAnomalySpec()))
        return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes descriptor, each set field (tagged by field number) and unknown
  // fields; the result is memoized since the message is immutable.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    if (hasFeatureStatsAndAnomalySpec()) {
      hash = (37 * hash) + FEATURE_STATS_AND_ANOMALY_SPEC_FIELD_NUMBER;
      hash = (53 * hash) + getFeatureStatsAndAnomalySpec().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points, one pair (with/without an extension
  // registry) per supported input source; all delegate to PARSER.
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods: all builders start from the shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.GetFeatureRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [FeaturestoreService.GetFeature][google.cloud.aiplatform.v1beta1.FeaturestoreService.GetFeature].
* Request message for
* [FeatureRegistryService.GetFeature][google.cloud.aiplatform.v1beta1.FeatureRegistryService.GetFeature].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.GetFeatureRequest}
*/
// NOTE(review): protoc-generated builder for GetFeatureRequest. Code is kept
// byte-identical to the generator's output; only explanatory comments added.
// Fields tracked here: name (bit 0x1, tag 1) and
// feature_stats_and_anomaly_spec (bit 0x2, tag 3).
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.GetFeatureRequest)
    com.google.cloud.aiplatform.v1beta1.GetFeatureRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_GetFeatureRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_GetFeatureRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.GetFeatureRequest.class,
            com.google.cloud.aiplatform.v1beta1.GetFeatureRequest.Builder.class);
  }

  // Construct using com.google.cloud.aiplatform.v1beta1.GetFeatureRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates nested field builders when the runtime requests it
  // (alwaysUseFieldBuilders), so change notifications propagate to parents.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getFeatureStatsAndAnomalySpecFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    // Reset the has-bits and both fields to their defaults; dispose the nested
    // builder so a stale child does not keep reporting changes.
    bitField0_ = 0;
    name_ = "";
    featureStatsAndAnomalySpec_ = null;
    if (featureStatsAndAnomalySpecBuilder_ != null) {
      featureStatsAndAnomalySpecBuilder_.dispose();
      featureStatsAndAnomalySpecBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_GetFeatureRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.GetFeatureRequest getDefaultInstanceForType() {
    return com.google.cloud.aiplatform.v1beta1.GetFeatureRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.GetFeatureRequest build() {
    com.google.cloud.aiplatform.v1beta1.GetFeatureRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.GetFeatureRequest buildPartial() {
    com.google.cloud.aiplatform.v1beta1.GetFeatureRequest result =
        new com.google.cloud.aiplatform.v1beta1.GetFeatureRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose has-bits are set into the result message.
  // Note the builder's bit 0x2 (the message field) maps to the message's
  // bit 0x1, since name (a proto3 string) has no has-bit on the message.
  private void buildPartial0(com.google.cloud.aiplatform.v1beta1.GetFeatureRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.name_ = name_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.featureStatsAndAnomalySpec_ =
          featureStatsAndAnomalySpecBuilder_ == null
              ? featureStatsAndAnomalySpec_
              : featureStatsAndAnomalySpecBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Dispatch to the typed merge when possible; otherwise fall back to the
    // reflective merge in the superclass.
    if (other instanceof com.google.cloud.aiplatform.v1beta1.GetFeatureRequest) {
      return mergeFrom((com.google.cloud.aiplatform.v1beta1.GetFeatureRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Field-wise merge: non-empty name overwrites; a set spec is merged
  // recursively; unknown fields are appended.
  public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.GetFeatureRequest other) {
    if (other == com.google.cloud.aiplatform.v1beta1.GetFeatureRequest.getDefaultInstance())
      return this;
    if (!other.getName().isEmpty()) {
      name_ = other.name_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.hasFeatureStatsAndAnomalySpec()) {
      mergeFeatureStatsAndAnomalySpec(other.getFeatureStatsAndAnomalySpec());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields in this message.
    return true;
  }

  // Wire-format parse loop: tag 10 = name (field 1, string),
  // tag 26 = feature_stats_and_anomaly_spec (field 3, message).
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              name_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 26:
            {
              input.readMessage(
                  getFeatureStatsAndAnomalySpecFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  // Holds either a String or a ByteString; lazily converted in the accessors.
  private java.lang.Object name_ = "";
  /**
   *
   *
   * <pre>
   * Required. The name of the Feature resource.
   * Format for entity_type as parent:
   * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
   * Format for feature_group as parent:
   * `projects/{project}/locations/{location}/featureGroups/{feature_group}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (!(ref instanceof java.lang.String)) {
      // Cache the UTF-8 decoding so subsequent calls return the String directly.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Feature resource.
   * Format for entity_type as parent:
   * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
   * Format for feature_group as parent:
   * `projects/{project}/locations/{location}/featureGroups/{feature_group}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for name.
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof String) {
      // Cache the UTF-8 encoding mirror-image of getName().
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Feature resource.
   * Format for entity_type as parent:
   * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
   * Format for feature_group as parent:
   * `projects/{project}/locations/{location}/featureGroups/{feature_group}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The name to set.
   * @return This builder for chaining.
   */
  public Builder setName(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Feature resource.
   * Format for entity_type as parent:
   * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
   * Format for feature_group as parent:
   * `projects/{project}/locations/{location}/featureGroups/{feature_group}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearName() {
    name_ = getDefaultInstance().getName();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. The name of the Feature resource.
   * Format for entity_type as parent:
   * `projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entity_type}`
   * Format for feature_group as parent:
   * `projects/{project}/locations/{location}/featureGroups/{feature_group}`
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @param value The bytes for name to set.
   * @return This builder for chaining.
   */
  public Builder setNameBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  // Message field: exactly one of the plain field or the nested builder is
  // authoritative at any time (the builder, once created, owns the value).
  private com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec
      featureStatsAndAnomalySpec_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec,
          com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.Builder,
          com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpecOrBuilder>
      featureStatsAndAnomalySpecBuilder_;
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the featureStatsAndAnomalySpec field is set.
   */
  public boolean hasFeatureStatsAndAnomalySpec() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The featureStatsAndAnomalySpec.
   */
  public com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec
      getFeatureStatsAndAnomalySpec() {
    if (featureStatsAndAnomalySpecBuilder_ == null) {
      return featureStatsAndAnomalySpec_ == null
          ? com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.getDefaultInstance()
          : featureStatsAndAnomalySpec_;
    } else {
      return featureStatsAndAnomalySpecBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setFeatureStatsAndAnomalySpec(
      com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec value) {
    if (featureStatsAndAnomalySpecBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      featureStatsAndAnomalySpec_ = value;
    } else {
      featureStatsAndAnomalySpecBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setFeatureStatsAndAnomalySpec(
      com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.Builder builderForValue) {
    if (featureStatsAndAnomalySpecBuilder_ == null) {
      featureStatsAndAnomalySpec_ = builderForValue.build();
    } else {
      featureStatsAndAnomalySpecBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder mergeFeatureStatsAndAnomalySpec(
      com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec value) {
    if (featureStatsAndAnomalySpecBuilder_ == null) {
      // Merge into the existing value only when one is already set and is not
      // the default instance; otherwise take the new value wholesale.
      if (((bitField0_ & 0x00000002) != 0)
          && featureStatsAndAnomalySpec_ != null
          && featureStatsAndAnomalySpec_
              != com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec
                  .getDefaultInstance()) {
        getFeatureStatsAndAnomalySpecBuilder().mergeFrom(value);
      } else {
        featureStatsAndAnomalySpec_ = value;
      }
    } else {
      featureStatsAndAnomalySpecBuilder_.mergeFrom(value);
    }
    if (featureStatsAndAnomalySpec_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder clearFeatureStatsAndAnomalySpec() {
    bitField0_ = (bitField0_ & ~0x00000002);
    featureStatsAndAnomalySpec_ = null;
    if (featureStatsAndAnomalySpecBuilder_ != null) {
      featureStatsAndAnomalySpecBuilder_.dispose();
      featureStatsAndAnomalySpecBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.Builder
      getFeatureStatsAndAnomalySpecBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getFeatureStatsAndAnomalySpecFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpecOrBuilder
      getFeatureStatsAndAnomalySpecOrBuilder() {
    if (featureStatsAndAnomalySpecBuilder_ != null) {
      return featureStatsAndAnomalySpecBuilder_.getMessageOrBuilder();
    } else {
      return featureStatsAndAnomalySpec_ == null
          ? com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.getDefaultInstance()
          : featureStatsAndAnomalySpec_;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Only applicable for Vertex AI Feature Store.
   * If set, retrieves FeatureStatsAndAnomaly generated by FeatureMonitors based
   * on this spec.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec feature_stats_and_anomaly_spec = 3 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec,
          com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.Builder,
          com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpecOrBuilder>
      getFeatureStatsAndAnomalySpecFieldBuilder() {
    if (featureStatsAndAnomalySpecBuilder_ == null) {
      // Lazily create the nested builder; from here on it owns the value, so
      // the plain field is nulled out.
      featureStatsAndAnomalySpecBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec,
              com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpec.Builder,
              com.google.cloud.aiplatform.v1beta1.FeatureStatsAndAnomalySpecOrBuilder>(
              getFeatureStatsAndAnomalySpec(), getParentForChildren(), isClean());
      featureStatsAndAnomalySpec_ = null;
    }
    return featureStatsAndAnomalySpecBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.GetFeatureRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.GetFeatureRequest)
// Singleton empty instance shared by all callers; also used as the prototype
// for new builders.
private static final com.google.cloud.aiplatform.v1beta1.GetFeatureRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.GetFeatureRequest();
}

public static com.google.cloud.aiplatform.v1beta1.GetFeatureRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser: delegates to Builder.mergeFrom and normalizes every failure
// into InvalidProtocolBufferException carrying the partially-built message.
private static final com.google.protobuf.Parser<GetFeatureRequest> PARSER =
    new com.google.protobuf.AbstractParser<GetFeatureRequest>() {
      @java.lang.Override
      public GetFeatureRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<GetFeatureRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<GetFeatureRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.GetFeatureRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,284 | java-gkehub/proto-google-cloud-gkehub-v1alpha/src/main/java/com/google/cloud/gkehub/configmanagement/v1alpha/HierarchyControllerState.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1alpha/configmanagement/configmanagement.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.configmanagement.v1alpha;
/**
*
*
* <pre>
* State for Hierarchy Controller
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState}
*/
public final class HierarchyControllerState extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState)
HierarchyControllerStateOrBuilder {
private static final long serialVersionUID = 0L;

// Use HierarchyControllerState.newBuilder() to construct.
private HierarchyControllerState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used only for the default instance; all fields default.
private HierarchyControllerState() {}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new HierarchyControllerState();
}
// Returns the proto descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
      .internal_static_google_cloud_gkehub_configmanagement_v1alpha_HierarchyControllerState_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
      .internal_static_google_cloud_gkehub_configmanagement_v1alpha_HierarchyControllerState_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState.class,
          com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState.Builder
              .class);
}
// Has-bits: 0x1 = version, 0x2 = state.
private int bitField0_;

public static final int VERSION_FIELD_NUMBER = 1;
private com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version_;
/**
 *
 *
 * <pre>
 * The version for Hierarchy Controller
 * </pre>
 *
 * <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
 * </code>
 *
 * @return Whether the version field is set.
 */
@java.lang.Override
public boolean hasVersion() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * The version for Hierarchy Controller
 * </pre>
 *
 * <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
 * </code>
 *
 * @return The version. Never null; returns the default instance when unset.
 */
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion getVersion() {
  return version_ == null
      ? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion
          .getDefaultInstance()
      : version_;
}
/**
 *
 *
 * <pre>
 * The version for Hierarchy Controller
 * </pre>
 *
 * <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
 * </code>
 */
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersionOrBuilder
    getVersionOrBuilder() {
  return version_ == null
      ? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion
          .getDefaultInstance()
      : version_;
}
public static final int STATE_FIELD_NUMBER = 2;
private com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
    state_;
/**
 *
 *
 * <pre>
 * The deployment state for Hierarchy Controller
 * </pre>
 *
 * <code>
 * .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
 * </code>
 *
 * @return Whether the state field is set.
 */
@java.lang.Override
public boolean hasState() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * The deployment state for Hierarchy Controller
 * </pre>
 *
 * <code>
 * .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
 * </code>
 *
 * @return The state. Never null; returns the default instance when unset.
 */
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
    getState() {
  return state_ == null
      ? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
          .getDefaultInstance()
      : state_;
}
/**
 *
 *
 * <pre>
 * The deployment state for Hierarchy Controller
 * </pre>
 *
 * <code>
 * .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
 * </code>
 */
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha
        .HierarchyControllerDeploymentStateOrBuilder
    getStateOrBuilder() {
  return state_ == null
      ? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
          .getDefaultInstance()
      : state_;
}
// Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields (version = 1, state = 2) plus unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getVersion());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getState());
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes) the serialized byte size; -1 means not yet computed.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getVersion());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getState());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-wise equality: has-bits must match, then set fields must be equal,
// then unknown fields must be equal.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj
      instanceof com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState)) {
    return super.equals(obj);
  }
  com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState other =
      (com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState) obj;
  if (hasVersion() != other.hasVersion()) return false;
  if (hasVersion()) {
    if (!getVersion().equals(other.getVersion())) return false;
  }
  if (hasState() != other.hasState()) return false;
  if (hasState()) {
    if (!getState().equals(other.getState())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Memoized hash consistent with equals(): mixes descriptor, each set field
// (keyed by field number), and unknown fields.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasVersion()) {
    hash = (37 * hash) + VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getVersion().hashCode();
  }
  if (hasState()) {
    hash = (37 * hash) + STATE_FIELD_NUMBER;
    hash = (53 * hash) + getState().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points for every supported input form (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream), each with and without
// an extension registry. All delegate to the shared PARSER.
public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates a fresh builder seeded from the default (empty) instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with all fields of {@code prototype}.
public static Builder newBuilder(
    com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a brand-new empty Builder; any other instance
  // is copied into the new Builder via mergeFrom.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* State for Hierarchy Controller
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState)
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerStateOrBuilder {
// Returns the proto descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
      .internal_static_google_cloud_gkehub_configmanagement_v1alpha_HierarchyControllerState_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
      .internal_static_google_cloud_gkehub_configmanagement_v1alpha_HierarchyControllerState_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState.class,
          com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState.Builder
              .class);
}

// Construct using
// com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}

// Eagerly creates nested field builders when the runtime requests it
// (alwaysUseFieldBuilders), so change notifications propagate to parents.
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
    getVersionFieldBuilder();
    getStateFieldBuilder();
  }
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
version_ = null;
if (versionBuilder_ != null) {
versionBuilder_.dispose();
versionBuilder_ = null;
}
state_ = null;
if (stateBuilder_ != null) {
stateBuilder_.dispose();
stateBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.configmanagement.v1alpha.ConfigManagementProto
.internal_static_google_cloud_gkehub_configmanagement_v1alpha_HierarchyControllerState_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
getDefaultInstanceForType() {
return com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState build() {
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
buildPartial() {
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState result =
new com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.version_ = versionBuilder_ == null ? version_ : versionBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.state_ = stateBuilder_ == null ? state_ : stateBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState) {
return mergeFrom(
(com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState other) {
if (other
== com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
.getDefaultInstance()) return this;
if (other.hasVersion()) {
mergeVersion(other.getVersion());
}
if (other.hasState()) {
mergeState(other.getState());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getVersionFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getStateFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion.Builder,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersionOrBuilder>
versionBuilder_;
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*
* @return Whether the version field is set.
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*
* @return The version.
*/
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion
getVersion() {
if (versionBuilder_ == null) {
return version_ == null
? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion
.getDefaultInstance()
: version_;
} else {
return versionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*/
public Builder setVersion(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion value) {
if (versionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
version_ = value;
} else {
versionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*/
public Builder setVersion(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion.Builder
builderForValue) {
if (versionBuilder_ == null) {
version_ = builderForValue.build();
} else {
versionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*/
public Builder mergeVersion(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion value) {
if (versionBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& version_ != null
&& version_
!= com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion
.getDefaultInstance()) {
getVersionBuilder().mergeFrom(value);
} else {
version_ = value;
}
} else {
versionBuilder_.mergeFrom(value);
}
if (version_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = null;
if (versionBuilder_ != null) {
versionBuilder_.dispose();
versionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*/
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion.Builder
getVersionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getVersionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*/
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersionOrBuilder
getVersionOrBuilder() {
if (versionBuilder_ != null) {
return versionBuilder_.getMessageOrBuilder();
} else {
return version_ == null
? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion
.getDefaultInstance()
: version_;
}
}
/**
*
*
* <pre>
* The version for Hierarchy Controller
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion version = 1;
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion.Builder,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersionOrBuilder>
getVersionFieldBuilder() {
if (versionBuilder_ == null) {
versionBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerVersion.Builder,
com.google.cloud.gkehub.configmanagement.v1alpha
.HierarchyControllerVersionOrBuilder>(
getVersion(), getParentForChildren(), isClean());
version_ = null;
}
return versionBuilder_;
}
private com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
state_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
.Builder,
com.google.cloud.gkehub.configmanagement.v1alpha
.HierarchyControllerDeploymentStateOrBuilder>
stateBuilder_;
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*
* @return Whether the state field is set.
*/
public boolean hasState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*
* @return The state.
*/
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
getState() {
if (stateBuilder_ == null) {
return state_ == null
? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
.getDefaultInstance()
: state_;
} else {
return stateBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*/
public Builder setState(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState value) {
if (stateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
state_ = value;
} else {
stateBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*/
public Builder setState(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState.Builder
builderForValue) {
if (stateBuilder_ == null) {
state_ = builderForValue.build();
} else {
stateBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*/
public Builder mergeState(
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState value) {
if (stateBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& state_ != null
&& state_
!= com.google.cloud.gkehub.configmanagement.v1alpha
.HierarchyControllerDeploymentState.getDefaultInstance()) {
getStateBuilder().mergeFrom(value);
} else {
state_ = value;
}
} else {
stateBuilder_.mergeFrom(value);
}
if (state_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*/
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000002);
state_ = null;
if (stateBuilder_ != null) {
stateBuilder_.dispose();
stateBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*/
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
.Builder
getStateBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getStateFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*/
public com.google.cloud.gkehub.configmanagement.v1alpha
.HierarchyControllerDeploymentStateOrBuilder
getStateOrBuilder() {
if (stateBuilder_ != null) {
return stateBuilder_.getMessageOrBuilder();
} else {
return state_ == null
? com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
.getDefaultInstance()
: state_;
}
}
/**
*
*
* <pre>
* The deployment state for Hierarchy Controller
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState state = 2;
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
.Builder,
com.google.cloud.gkehub.configmanagement.v1alpha
.HierarchyControllerDeploymentStateOrBuilder>
getStateFieldBuilder() {
if (stateBuilder_ == null) {
stateBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState,
com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerDeploymentState
.Builder,
com.google.cloud.gkehub.configmanagement.v1alpha
.HierarchyControllerDeploymentStateOrBuilder>(
getState(), getParentForChildren(), isClean());
state_ = null;
}
return stateBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState)
// Singleton default instance, created once at class-initialization time.
private static final com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState();
}
public static com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that builds the message through a Builder, attaching the partially
// built message to any parse exception so callers can inspect what was read.
private static final com.google.protobuf.Parser<HierarchyControllerState> PARSER =
new com.google.protobuf.AbstractParser<HierarchyControllerState>() {
@java.lang.Override
public HierarchyControllerState parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Accessors for the singleton PARSER and the shared default instance.
public static com.google.protobuf.Parser<HierarchyControllerState> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<HierarchyControllerState> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1alpha.HierarchyControllerState
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,186 | java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/ProjectSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/settings.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* The Artifact Registry settings that apply to a Project.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ProjectSettings}
*/
public final class ProjectSettings extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ProjectSettings)
ProjectSettingsOrBuilder {
private static final long serialVersionUID = 0L;
// Use ProjectSettings.newBuilder() to construct.
private ProjectSettings(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializes scalar fields to their proto3 defaults.
private ProjectSettings() {
name_ = "";
legacyRedirectionState_ = 0;
}
// Used by the runtime's reflective instantiation path; the parameter exists
// only to disambiguate the overload and is never read.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ProjectSettings();
}
// Descriptor plumbing linking this generated class to the metadata compiled
// from google/devtools/artifactregistry/v1/settings.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1.SettingsProto
.internal_static_google_devtools_artifactregistry_v1_ProjectSettings_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1.SettingsProto
.internal_static_google_devtools_artifactregistry_v1_ProjectSettings_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1.ProjectSettings.class,
com.google.devtools.artifactregistry.v1.ProjectSettings.Builder.class);
}
/**
*
*
* <pre>
* The possible redirection states for legacy repositories.
* </pre>
*
* Protobuf enum {@code google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState}
*/
public enum RedirectionState implements com.google.protobuf.ProtocolMessageEnum {
// Generated enum mirroring the proto RedirectionState values. UNRECOGNIZED(-1)
// stands in for a wire value this runtime version does not know about; note
// wire value 4 is intentionally absent from the proto definition.
/**
*
*
* <pre>
* No redirection status has been set.
* </pre>
*
* <code>REDIRECTION_STATE_UNSPECIFIED = 0;</code>
*/
REDIRECTION_STATE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Redirection is disabled.
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_DISABLED = 1;</code>
*/
REDIRECTION_FROM_GCR_IO_DISABLED(1),
/**
*
*
* <pre>
* Redirection is enabled.
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_ENABLED = 2;</code>
*/
REDIRECTION_FROM_GCR_IO_ENABLED(2),
/**
*
*
* <pre>
* Redirection is enabled, and has been finalized so cannot be reverted.
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_FINALIZED = 3 [deprecated = true];</code>
*/
@java.lang.Deprecated
REDIRECTION_FROM_GCR_IO_FINALIZED(3),
/**
*
*
* <pre>
* Redirection is enabled and missing images are copied from GCR
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_ENABLED_AND_COPYING = 5;</code>
*/
REDIRECTION_FROM_GCR_IO_ENABLED_AND_COPYING(5),
/**
*
*
* <pre>
* Redirection is partially enabled and missing images are copied from GCR
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_PARTIAL_AND_COPYING = 6;</code>
*/
REDIRECTION_FROM_GCR_IO_PARTIAL_AND_COPYING(6),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* No redirection status has been set.
* </pre>
*
* <code>REDIRECTION_STATE_UNSPECIFIED = 0;</code>
*/
public static final int REDIRECTION_STATE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Redirection is disabled.
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_DISABLED = 1;</code>
*/
public static final int REDIRECTION_FROM_GCR_IO_DISABLED_VALUE = 1;
/**
*
*
* <pre>
* Redirection is enabled.
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_ENABLED = 2;</code>
*/
public static final int REDIRECTION_FROM_GCR_IO_ENABLED_VALUE = 2;
/**
*
*
* <pre>
* Redirection is enabled, and has been finalized so cannot be reverted.
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_FINALIZED = 3 [deprecated = true];</code>
*/
@java.lang.Deprecated public static final int REDIRECTION_FROM_GCR_IO_FINALIZED_VALUE = 3;
/**
*
*
* <pre>
* Redirection is enabled and missing images are copied from GCR
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_ENABLED_AND_COPYING = 5;</code>
*/
public static final int REDIRECTION_FROM_GCR_IO_ENABLED_AND_COPYING_VALUE = 5;
/**
*
*
* <pre>
* Redirection is partially enabled and missing images are copied from GCR
* </pre>
*
* <code>REDIRECTION_FROM_GCR_IO_PARTIAL_AND_COPYING = 6;</code>
*/
public static final int REDIRECTION_FROM_GCR_IO_PARTIAL_AND_COPYING_VALUE = 6;
// Wire value of this constant; throws for UNRECOGNIZED, which has no
// well-defined number.
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static RedirectionState valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static RedirectionState forNumber(int value) {
switch (value) {
case 0:
return REDIRECTION_STATE_UNSPECIFIED;
case 1:
return REDIRECTION_FROM_GCR_IO_DISABLED;
case 2:
return REDIRECTION_FROM_GCR_IO_ENABLED;
case 3:
return REDIRECTION_FROM_GCR_IO_FINALIZED;
case 5:
return REDIRECTION_FROM_GCR_IO_ENABLED_AND_COPYING;
case 6:
return REDIRECTION_FROM_GCR_IO_PARTIAL_AND_COPYING;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<RedirectionState> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<RedirectionState>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<RedirectionState>() {
public RedirectionState findValueByNumber(int number) {
return RedirectionState.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1.ProjectSettings.getDescriptor()
.getEnumTypes()
.get(0);
}
// Cached values() array: avoids re-cloning it on every descriptor lookup.
private static final RedirectionState[] VALUES = values();
public static RedirectionState valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private RedirectionState(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState)
}
public static final int NAME_FIELD_NUMBER = 1;
// Field 1 (name): stored either as a String or as a ByteString; the volatile
// Object holder lets getName()/getNameBytes() lazily convert and cache.
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* The name of the project's settings.
*
* Always of the form:
* projects/{project-id}/projectSettings
*
* In update request: never set
* In response: always set
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Decode the ByteString once and cache the String back into name_.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the project's settings.
*
* Always of the form:
* projects/{project-id}/projectSettings
*
* In update request: never set
* In response: always set
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LEGACY_REDIRECTION_STATE_FIELD_NUMBER = 2;
// Field 2: stored as the raw wire integer; converted to the enum on demand.
private int legacyRedirectionState_ = 0;
/**
*
*
* <pre>
* The redirection state of the legacy repositories in this project.
* </pre>
*
* <code>
* .google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState legacy_redirection_state = 2;
* </code>
*
* @return The enum numeric value on the wire for legacyRedirectionState.
*/
@java.lang.Override
public int getLegacyRedirectionStateValue() {
return legacyRedirectionState_;
}
/**
*
*
* <pre>
* The redirection state of the legacy repositories in this project.
* </pre>
*
* <code>
* .google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState legacy_redirection_state = 2;
* </code>
*
* @return The legacyRedirectionState.
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState
getLegacyRedirectionState() {
// Unknown wire values map to UNRECOGNIZED rather than null.
com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState result =
com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState.forNumber(
legacyRedirectionState_);
return result == null
? com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState.UNRECOGNIZED
: result;
}
public static final int PULL_PERCENT_FIELD_NUMBER = 3;
private int pullPercent_ = 0;
/**
*
*
* <pre>
* The percentage of pull traffic to redirect from GCR to AR when using
* partial redirection.
* </pre>
*
* <code>int32 pull_percent = 3;</code>
*
* @return The pullPercent.
*/
@java.lang.Override
public int getPullPercent() {
return pullPercent_;
}
// Tri-state cache: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
// proto3 message with no required fields, so it is always initialized.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes only non-default fields, per proto3 semantics, then appends
// any preserved unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (legacyRedirectionState_
!= com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState
.REDIRECTION_STATE_UNSPECIFIED
.getNumber()) {
output.writeEnum(2, legacyRedirectionState_);
}
if (pullPercent_ != 0) {
output.writeInt32(3, pullPercent_);
}
getUnknownFields().writeTo(output);
}
// Mirrors writeTo: sizes only non-default fields. The result is cached in
// memoizedSize (-1 means "not yet computed").
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (legacyRedirectionState_
!= com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState
.REDIRECTION_STATE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, legacyRedirectionState_);
}
if (pullPercent_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pullPercent_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-wise equality, including preserved unknown fields. Falls back to the
// superclass for non-ProjectSettings arguments.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.devtools.artifactregistry.v1.ProjectSettings)) {
return super.equals(obj);
}
com.google.devtools.artifactregistry.v1.ProjectSettings other =
(com.google.devtools.artifactregistry.v1.ProjectSettings) obj;
if (!getName().equals(other.getName())) return false;
if (legacyRedirectionState_ != other.legacyRedirectionState_) return false;
if (getPullPercent() != other.getPullPercent()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash over descriptor and all fields, cached in memoizedHashCode
// (0 means "not yet computed"). Consistent with equals() above.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + LEGACY_REDIRECTION_STATE_FIELD_NUMBER;
hash = (53 * hash) + legacyRedirectionState_;
hash = (37 * hash) + PULL_PERCENT_FIELD_NUMBER;
hash = (53 * hash) + getPullPercent();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads. All delegate to
// the singleton PARSER (directly for in-memory inputs, or via the
// GeneratedMessageV3 helpers that translate IOExceptions for stream inputs).
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1.ProjectSettings parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// ---------------------------------------------------------------------------
// Builder factory methods.
// ---------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Creates a fresh builder seeded from the (empty) default instance. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Creates a builder pre-populated with {@code prototype}'s field values. */
public static Builder newBuilder(
    com.google.devtools.artifactregistry.v1.ProjectSettings prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom when this is the default (all-defaults) instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for the Artifact Registry settings that apply to a Project.
 *
 * <p>Fields: {@code name} (string, field 1), {@code legacy_redirection_state}
 * (enum stored as its wire int, field 2), {@code pull_percent} (int32, field 3).
 * Presence of each field is tracked in {@code bitField0_} (bits 0x1, 0x2, 0x4).
 *
 * Protobuf type {@code google.devtools.artifactregistry.v1.ProjectSettings}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ProjectSettings)
    com.google.devtools.artifactregistry.v1.ProjectSettingsOrBuilder {
  /** Returns the descriptor for the ProjectSettings message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.artifactregistry.v1.SettingsProto
        .internal_static_google_devtools_artifactregistry_v1_ProjectSettings_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.artifactregistry.v1.SettingsProto
        .internal_static_google_devtools_artifactregistry_v1_ProjectSettings_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.artifactregistry.v1.ProjectSettings.class,
            com.google.devtools.artifactregistry.v1.ProjectSettings.Builder.class);
  }

  // Construct using com.google.devtools.artifactregistry.v1.ProjectSettings.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  /** Resets every field (and its presence bit) to the proto default. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    name_ = "";
    legacyRedirectionState_ = 0;
    pullPercent_ = 0;
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.devtools.artifactregistry.v1.SettingsProto
        .internal_static_google_devtools_artifactregistry_v1_ProjectSettings_descriptor;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.ProjectSettings getDefaultInstanceForType() {
    return com.google.devtools.artifactregistry.v1.ProjectSettings.getDefaultInstance();
  }

  /** Builds the message, throwing if required invariants are unmet. */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.ProjectSettings build() {
    com.google.devtools.artifactregistry.v1.ProjectSettings result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.ProjectSettings buildPartial() {
    com.google.devtools.artifactregistry.v1.ProjectSettings result =
        new com.google.devtools.artifactregistry.v1.ProjectSettings(this);
    // Only copy fields whose presence bit is set.
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies each set field from the builder into the freshly built message.
  private void buildPartial0(com.google.devtools.artifactregistry.v1.ProjectSettings result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.name_ = name_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.legacyRedirectionState_ = legacyRedirectionState_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.pullPercent_ = pullPercent_;
    }
  }

  // ---- Boilerplate overrides that simply delegate to the superclass. ----
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  /** Type-dispatching merge: uses the typed overload when possible. */
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.devtools.artifactregistry.v1.ProjectSettings) {
      return mergeFrom((com.google.devtools.artifactregistry.v1.ProjectSettings) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /**
   * Merges {@code other} into this builder. Scalar fields are only copied
   * when set to a non-default value in {@code other} (protobuf merge
   * semantics); unknown fields are merged as well.
   */
  public Builder mergeFrom(com.google.devtools.artifactregistry.v1.ProjectSettings other) {
    if (other == com.google.devtools.artifactregistry.v1.ProjectSettings.getDefaultInstance())
      return this;
    if (!other.getName().isEmpty()) {
      name_ = other.name_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.legacyRedirectionState_ != 0) {
      setLegacyRedirectionStateValue(other.getLegacyRedirectionStateValue());
    }
    if (other.getPullPercent() != 0) {
      setPullPercent(other.getPullPercent());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields in this message, so it is always initialized.
    return true;
  }

  /**
   * Parses fields from the wire. Tag 0 or an end-group tag terminates the
   * loop; unrecognized tags are preserved via parseUnknownField.
   */
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              name_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 16:
            {
              legacyRedirectionState_ = input.readEnum();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 24:
            {
              pullPercent_ = input.readInt32();
              bitField0_ |= 0x00000004;
              break;
            } // case 24
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      // Notify parent builders even when parsing failed part-way.
      onChanged();
    } // finally
    return this;
  }

  // Presence bits: 0x1 = name, 0x2 = legacy_redirection_state, 0x4 = pull_percent.
  private int bitField0_;

  // Holds either a String or a ByteString; decoded lazily to String on read.
  private java.lang.Object name_ = "";

  /**
   * The name of the project's settings, always of the form
   * projects/{project-id}/projectSettings. Never set in update requests;
   * always set in responses.
   *
   * <code>string name = 1;</code>
   *
   * @return The name.
   */
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (!(ref instanceof java.lang.String)) {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   * UTF-8 bytes of {@code name}; caches the ByteString form.
   *
   * <code>string name = 1;</code>
   *
   * @return The bytes for name.
   */
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   * Sets {@code name}.
   *
   * <code>string name = 1;</code>
   *
   * @param value The name to set; must not be null.
   * @return This builder for chaining.
   */
  public Builder setName(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   * Clears {@code name} back to the default value.
   *
   * <code>string name = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearName() {
    name_ = getDefaultInstance().getName();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /**
   * Sets {@code name} from UTF-8 bytes (validated).
   *
   * <code>string name = 1;</code>
   *
   * @param value The bytes for name to set; must not be null.
   * @return This builder for chaining.
   */
  public Builder setNameBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    name_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  // Enum stored as its raw wire number so unrecognized values round-trip.
  private int legacyRedirectionState_ = 0;

  /**
   * The redirection state of the legacy repositories in this project.
   *
   * <code>
   * .google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState legacy_redirection_state = 2;
   * </code>
   *
   * @return The enum numeric value on the wire for legacyRedirectionState.
   */
  @java.lang.Override
  public int getLegacyRedirectionStateValue() {
    return legacyRedirectionState_;
  }

  /**
   * Sets the raw wire value for legacy_redirection_state.
   *
   * <code>
   * .google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState legacy_redirection_state = 2;
   * </code>
   *
   * @param value The enum numeric value on the wire for legacyRedirectionState to set.
   * @return This builder for chaining.
   */
  public Builder setLegacyRedirectionStateValue(int value) {
    legacyRedirectionState_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   * Typed view of legacy_redirection_state; UNRECOGNIZED if the stored wire
   * number has no enum constant.
   *
   * <code>
   * .google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState legacy_redirection_state = 2;
   * </code>
   *
   * @return The legacyRedirectionState.
   */
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState
      getLegacyRedirectionState() {
    com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState result =
        com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState.forNumber(
            legacyRedirectionState_);
    return result == null
        ? com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState.UNRECOGNIZED
        : result;
  }

  /**
   * Sets legacy_redirection_state from the typed enum.
   *
   * <code>
   * .google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState legacy_redirection_state = 2;
   * </code>
   *
   * @param value The legacyRedirectionState to set; must not be null.
   * @return This builder for chaining.
   */
  public Builder setLegacyRedirectionState(
      com.google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000002;
    legacyRedirectionState_ = value.getNumber();
    onChanged();
    return this;
  }

  /**
   * Clears legacy_redirection_state back to its default (0).
   *
   * <code>
   * .google.devtools.artifactregistry.v1.ProjectSettings.RedirectionState legacy_redirection_state = 2;
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearLegacyRedirectionState() {
    bitField0_ = (bitField0_ & ~0x00000002);
    legacyRedirectionState_ = 0;
    onChanged();
    return this;
  }

  private int pullPercent_;

  /**
   * The percentage of pull traffic to redirect from GCR to AR when using
   * partial redirection.
   *
   * <code>int32 pull_percent = 3;</code>
   *
   * @return The pullPercent.
   */
  @java.lang.Override
  public int getPullPercent() {
    return pullPercent_;
  }

  /**
   * Sets pull_percent.
   *
   * <code>int32 pull_percent = 3;</code>
   *
   * @param value The pullPercent to set.
   * @return This builder for chaining.
   */
  public Builder setPullPercent(int value) {
    pullPercent_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  /**
   * Clears pull_percent back to its default (0).
   *
   * <code>int32 pull_percent = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearPullPercent() {
    bitField0_ = (bitField0_ & ~0x00000004);
    pullPercent_ = 0;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ProjectSettings)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ProjectSettings)
// Singleton default (all-fields-default) instance, created at class load.
private static final com.google.devtools.artifactregistry.v1.ProjectSettings DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ProjectSettings();
}

public static com.google.devtools.artifactregistry.v1.ProjectSettings getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Shared parser; parsePartialFrom funnels every failure into
// InvalidProtocolBufferException while attaching the partially built message.
private static final com.google.protobuf.Parser<ProjectSettings> PARSER =
    new com.google.protobuf.AbstractParser<ProjectSettings>() {
      @java.lang.Override
      public ProjectSettings parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ProjectSettings> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ProjectSettings> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.devtools.artifactregistry.v1.ProjectSettings getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ---- end of generated file: ProjectSettings.java (googleapis/google-cloud-java) ----
// ---- begin generated file: java-shopping-css/proto-google-shopping-css-v1/src/main/java/com/google/shopping/css/v1/UpdateAccountLabelsRequest.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/css/v1/accounts.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.css.v1;
/**
*
*
* <pre>
* The request message for the `UpdateLabels` method.
* </pre>
*
* Protobuf type {@code google.shopping.css.v1.UpdateAccountLabelsRequest}
*/
public final class UpdateAccountLabelsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.css.v1.UpdateAccountLabelsRequest)
UpdateAccountLabelsRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use UpdateAccountLabelsRequest.newBuilder() to construct.
private UpdateAccountLabelsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor: initializes string fields to "" and the repeated
// label_ids field to the shared empty long list.
private UpdateAccountLabelsRequest() {
  name_ = "";
  labelIds_ = emptyLongList();
  parent_ = "";
}

// Used reflectively by the protobuf runtime to create new instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new UpdateAccountLabelsRequest();
}
/** Returns the descriptor for the UpdateAccountLabelsRequest message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.shopping.css.v1.AccountsProto
      .internal_static_google_shopping_css_v1_UpdateAccountLabelsRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.shopping.css.v1.AccountsProto
      .internal_static_google_shopping_css_v1_UpdateAccountLabelsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.shopping.css.v1.UpdateAccountLabelsRequest.class,
          com.google.shopping.css.v1.UpdateAccountLabelsRequest.Builder.class);
}
// Presence bits for optional fields; bit 0x1 tracks `parent` (field 3).
private int bitField0_;

public static final int NAME_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; decoded lazily to String on read.
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";

/**
 * Required. The label resource name. Format: accounts/{account}
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The name.
 */
@java.lang.Override
public java.lang.String getName() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String for subsequent reads.
    name_ = s;
    return s;
  }
}

/**
 * UTF-8 bytes of {@code name}; caches the ByteString form.
 *
 * <code>
 * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for name.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
  java.lang.Object ref = name_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    name_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int LABEL_IDS_FIELD_NUMBER = 2;

@SuppressWarnings("serial")
private com.google.protobuf.Internal.LongList labelIds_ = emptyLongList();

/**
 * The list of label IDs to overwrite the existing account label IDs.
 * If the list is empty, all currently assigned label IDs will be deleted.
 *
 * <code>repeated int64 label_ids = 2;</code>
 *
 * @return A list containing the labelIds.
 */
@java.lang.Override
public java.util.List<java.lang.Long> getLabelIdsList() {
  return labelIds_;
}

/**
 * Number of label IDs in the list.
 *
 * <code>repeated int64 label_ids = 2;</code>
 *
 * @return The count of labelIds.
 */
public int getLabelIdsCount() {
  return labelIds_.size();
}

/**
 * Returns the label ID at {@code index} without boxing.
 *
 * <code>repeated int64 label_ids = 2;</code>
 *
 * @param index The index of the element to return.
 * @return The labelIds at the given index.
 */
public long getLabelIds(int index) {
  return labelIds_.getLong(index);
}

// Cached payload size for the packed label_ids field; populated by
// getSerializedSize() and consumed by writeTo() for the length prefix.
private int labelIdsMemoizedSerializedSize = -1;
public static final int PARENT_FIELD_NUMBER = 3;

// Holds either a String or a ByteString; decoded lazily to String on read.
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 * Optional. Only required when updating MC account labels.
 * The CSS domain that is the parent resource of the MC account.
 * Format: accounts/{account}
 *
 * <code>
 * optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return Whether the parent field is set.
 */
@java.lang.Override
public boolean hasParent() {
  // Explicit presence: tracked via bit 0x1 of bitField0_.
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * Optional. Only required when updating MC account labels.
 * The CSS domain that is the parent resource of the MC account.
 * Format: accounts/{account}
 *
 * <code>
 * optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String for subsequent reads.
    parent_ = s;
    return s;
  }
}

/**
 * UTF-8 bytes of {@code parent}; caches the ByteString form.
 *
 * <code>
 * optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No proto-required fields, so always initialized once checked.
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Must run first: populates labelIdsMemoizedSerializedSize, which is
  // written below as the packed field's length prefix.
  getSerializedSize();
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
  }
  // label_ids is serialized packed: tag 18 (field 2, length-delimited),
  // then the byte length, then the raw varint payloads.
  if (getLabelIdsList().size() > 0) {
    output.writeUInt32NoTag(18);
    output.writeUInt32NoTag(labelIdsMemoizedSerializedSize);
  }
  for (int i = 0; i < labelIds_.size(); i++) {
    output.writeInt64NoTag(labelIds_.getLong(i));
  }
  // parent is only written when its presence bit is set.
  if (((bitField0_ & 0x00000001) != 0)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, parent_);
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
  }
  {
    // Packed label_ids: payload bytes plus (when non-empty) one tag byte
    // and the varint-encoded payload length.
    int dataSize = 0;
    for (int i = 0; i < labelIds_.size(); i++) {
      dataSize +=
          com.google.protobuf.CodedOutputStream.computeInt64SizeNoTag(labelIds_.getLong(i));
    }
    size += dataSize;
    if (!getLabelIdsList().isEmpty()) {
      size += 1;
      size += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(dataSize);
    }
    // Cache for writeTo's length prefix.
    labelIdsMemoizedSerializedSize = dataSize;
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, parent_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.shopping.css.v1.UpdateAccountLabelsRequest)) {
    return super.equals(obj);
  }
  com.google.shopping.css.v1.UpdateAccountLabelsRequest other =
      (com.google.shopping.css.v1.UpdateAccountLabelsRequest) obj;
  if (!getName().equals(other.getName())) return false;
  if (!getLabelIdsList().equals(other.getLabelIdsList())) return false;
  // For the optional `parent`, presence must match before values compare.
  if (hasParent() != other.hasParent()) return false;
  if (hasParent()) {
    if (!getParent().equals(other.getParent())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  // Memoized; 0 means "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + NAME_FIELD_NUMBER;
  hash = (53 * hash) + getName().hashCode();
  // Optional/repeated fields only contribute when present/non-empty,
  // mirroring equals().
  if (getLabelIdsCount() > 0) {
    hash = (37 * hash) + LABEL_IDS_FIELD_NUMBER;
    hash = (53 * hash) + getLabelIdsList().hashCode();
  }
  if (hasParent()) {
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Generated static parse entry points. Buffer/array/ByteString overloads
// delegate to PARSER directly; stream overloads go through GeneratedMessageV3
// helpers. Each overload has a variant accepting an ExtensionRegistryLite.
// ---------------------------------------------------------------------------
public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message (see writeDelimitedTo).
public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.shopping.css.v1.UpdateAccountLabelsRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// ---------------------------------------------------------------------------
// Builder factory methods.
// ---------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Creates a fresh builder seeded from the (empty) default instance. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Creates a builder pre-populated with {@code prototype}'s field values. */
public static Builder newBuilder(
    com.google.shopping.css.v1.UpdateAccountLabelsRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom when this is the default (all-defaults) instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* The request message for the `UpdateLabels` method.
* </pre>
*
* Protobuf type {@code google.shopping.css.v1.UpdateAccountLabelsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.css.v1.UpdateAccountLabelsRequest)
com.google.shopping.css.v1.UpdateAccountLabelsRequestOrBuilder {
/** Returns the descriptor for the UpdateAccountLabelsRequest message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.shopping.css.v1.AccountsProto
      .internal_static_google_shopping_css_v1_UpdateAccountLabelsRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.shopping.css.v1.AccountsProto
      .internal_static_google_shopping_css_v1_UpdateAccountLabelsRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.shopping.css.v1.UpdateAccountLabelsRequest.class,
          com.google.shopping.css.v1.UpdateAccountLabelsRequest.Builder.class);
}
// Construct using com.google.shopping.css.v1.UpdateAccountLabelsRequest.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

/** Resets every field (and the presence bits in bitField0_) to its default. */
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  name_ = "";
  labelIds_ = emptyLongList();
  parent_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.shopping.css.v1.AccountsProto
      .internal_static_google_shopping_css_v1_UpdateAccountLabelsRequest_descriptor;
}

@java.lang.Override
public com.google.shopping.css.v1.UpdateAccountLabelsRequest getDefaultInstanceForType() {
  return com.google.shopping.css.v1.UpdateAccountLabelsRequest.getDefaultInstance();
}

/** Builds the message, throwing if required invariants are unmet. */
@java.lang.Override
public com.google.shopping.css.v1.UpdateAccountLabelsRequest build() {
  com.google.shopping.css.v1.UpdateAccountLabelsRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.shopping.css.v1.UpdateAccountLabelsRequest buildPartial() {
  com.google.shopping.css.v1.UpdateAccountLabelsRequest result =
      new com.google.shopping.css.v1.UpdateAccountLabelsRequest(this);
  // Only copy fields whose presence bit is set.
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
private void buildPartial0(com.google.shopping.css.v1.UpdateAccountLabelsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
labelIds_.makeImmutable();
result.labelIds_ = labelIds_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.parent_ = parent_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.css.v1.UpdateAccountLabelsRequest) {
return mergeFrom((com.google.shopping.css.v1.UpdateAccountLabelsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.shopping.css.v1.UpdateAccountLabelsRequest other) {
if (other == com.google.shopping.css.v1.UpdateAccountLabelsRequest.getDefaultInstance())
return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.labelIds_.isEmpty()) {
if (labelIds_.isEmpty()) {
labelIds_ = other.labelIds_;
labelIds_.makeImmutable();
bitField0_ |= 0x00000002;
} else {
ensureLabelIdsIsMutable();
labelIds_.addAll(other.labelIds_);
}
onChanged();
}
if (other.hasParent()) {
parent_ = other.parent_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
long v = input.readInt64();
ensureLabelIdsIsMutable();
labelIds_.addLong(v);
break;
} // case 16
case 18:
{
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
ensureLabelIdsIsMutable();
while (input.getBytesUntilLimit() > 0) {
labelIds_.addLong(input.readInt64());
}
input.popLimit(limit);
break;
} // case 18
case 26:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The label resource name.
* Format: accounts/{account}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The label resource name.
* Format: accounts/{account}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The label resource name.
* Format: accounts/{account}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The label resource name.
* Format: accounts/{account}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The label resource name.
* Format: accounts/{account}
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.Internal.LongList labelIds_ = emptyLongList();
private void ensureLabelIdsIsMutable() {
if (!labelIds_.isModifiable()) {
labelIds_ = makeMutableCopy(labelIds_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* The list of label IDs to overwrite the existing account label IDs.
* If the list is empty, all currently assigned label IDs will be deleted.
* </pre>
*
* <code>repeated int64 label_ids = 2;</code>
*
* @return A list containing the labelIds.
*/
public java.util.List<java.lang.Long> getLabelIdsList() {
labelIds_.makeImmutable();
return labelIds_;
}
/**
*
*
* <pre>
* The list of label IDs to overwrite the existing account label IDs.
* If the list is empty, all currently assigned label IDs will be deleted.
* </pre>
*
* <code>repeated int64 label_ids = 2;</code>
*
* @return The count of labelIds.
*/
public int getLabelIdsCount() {
return labelIds_.size();
}
/**
*
*
* <pre>
* The list of label IDs to overwrite the existing account label IDs.
* If the list is empty, all currently assigned label IDs will be deleted.
* </pre>
*
* <code>repeated int64 label_ids = 2;</code>
*
* @param index The index of the element to return.
* @return The labelIds at the given index.
*/
public long getLabelIds(int index) {
return labelIds_.getLong(index);
}
/**
*
*
* <pre>
* The list of label IDs to overwrite the existing account label IDs.
* If the list is empty, all currently assigned label IDs will be deleted.
* </pre>
*
* <code>repeated int64 label_ids = 2;</code>
*
* @param index The index to set the value at.
* @param value The labelIds to set.
* @return This builder for chaining.
*/
public Builder setLabelIds(int index, long value) {
ensureLabelIdsIsMutable();
labelIds_.setLong(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of label IDs to overwrite the existing account label IDs.
* If the list is empty, all currently assigned label IDs will be deleted.
* </pre>
*
* <code>repeated int64 label_ids = 2;</code>
*
* @param value The labelIds to add.
* @return This builder for chaining.
*/
public Builder addLabelIds(long value) {
ensureLabelIdsIsMutable();
labelIds_.addLong(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of label IDs to overwrite the existing account label IDs.
* If the list is empty, all currently assigned label IDs will be deleted.
* </pre>
*
* <code>repeated int64 label_ids = 2;</code>
*
* @param values The labelIds to add.
* @return This builder for chaining.
*/
public Builder addAllLabelIds(java.lang.Iterable<? extends java.lang.Long> values) {
ensureLabelIdsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, labelIds_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of label IDs to overwrite the existing account label IDs.
* If the list is empty, all currently assigned label IDs will be deleted.
* </pre>
*
* <code>repeated int64 label_ids = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearLabelIds() {
labelIds_ = emptyLongList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Optional. Only required when updating MC account labels.
* The CSS domain that is the parent resource of the MC account.
* Format: accounts/{account}
* </pre>
*
* <code>
* optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return Whether the parent field is set.
*/
public boolean hasParent() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Optional. Only required when updating MC account labels.
* The CSS domain that is the parent resource of the MC account.
* Format: accounts/{account}
* </pre>
*
* <code>
* optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Only required when updating MC account labels.
* The CSS domain that is the parent resource of the MC account.
* Format: accounts/{account}
* </pre>
*
* <code>
* optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Only required when updating MC account labels.
* The CSS domain that is the parent resource of the MC account.
* Format: accounts/{account}
* </pre>
*
* <code>
* optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Only required when updating MC account labels.
* The CSS domain that is the parent resource of the MC account.
* Format: accounts/{account}
* </pre>
*
* <code>
* optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Only required when updating MC account labels.
* The CSS domain that is the parent resource of the MC account.
* Format: accounts/{account}
* </pre>
*
* <code>
* optional string parent = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.css.v1.UpdateAccountLabelsRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.css.v1.UpdateAccountLabelsRequest)
// Singleton empty instance shared by all callers; created eagerly at class load.
private static final com.google.shopping.css.v1.UpdateAccountLabelsRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.shopping.css.v1.UpdateAccountLabelsRequest();
}

/** Returns the shared immutable default (all-fields-unset) instance of this message. */
public static com.google.shopping.css.v1.UpdateAccountLabelsRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: delegates to Builder.mergeFrom and normalizes every
// failure into InvalidProtocolBufferException carrying the partially-parsed
// message (so callers can inspect what was read before the error).
private static final com.google.protobuf.Parser<UpdateAccountLabelsRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateAccountLabelsRequest>() {
      @java.lang.Override
      public UpdateAccountLabelsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the shared static parser for this message type. */
public static com.google.protobuf.Parser<UpdateAccountLabelsRequest> parser() {
  return PARSER;
}
/** Instance-level accessor required by the Message contract; returns the static parser. */
@java.lang.Override
public com.google.protobuf.Parser<UpdateAccountLabelsRequest> getParserForType() {
  return PARSER;
}
/** Instance-level accessor required by the Message contract; returns the singleton default. */
@java.lang.Override
public com.google.shopping.css.v1.UpdateAccountLabelsRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/fluss | 36,845 | fluss-common/src/main/java/org/apache/fluss/utils/crc/PureJavaCrc32C.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.fluss.utils.crc;
import java.util.zip.Checksum;
/* This file is based on source code of Apache Kafka Project (https://kafka.apache.org/), licensed by the Apache
* Software Foundation (ASF) under the Apache License, Version 2.0. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. */
/**
* This class was taken from Hadoop: org.apache.hadoop.util.PureJavaCrc32C.
*
* <p>A pure-java implementation of the CRC32 checksum that uses the CRC32-C polynomial, the same
* polynomial used by iSCSI and implemented on many Intel chipsets supporting SSE4.2.
*
* <p>NOTE: This class is intended for INTERNAL usage only within Fluss.
*/
public class PureJavaCrc32C implements Checksum {
/**
 * The running CRC register, stored as the ones' complement (bit-flipped) of the
 * actual checksum; {@code reset()} sets all bits and {@code getValue()} flips back.
 */
private int crc;
/** Creates a new CRC32-C checksum, pre-conditioned via {@link #reset()}. */
public PureJavaCrc32C() {
    reset();
}
@Override
public long getValue() {
long ret = crc;
return (~ret) & 0xffffffffL;
}
@Override
public void reset() {
    // Pre-condition the register with all bits set (same value as 0xffffffff).
    crc = ~0;
}
/**
 * Folds {@code len} bytes of {@code b}, starting at {@code off}, into the CRC.
 *
 * <p>Processes 8 bytes per iteration via the 8-way sliced lookup tables
 * (T8_7..T8_0), then finishes the 0-7 byte tail one byte at a time.
 *
 * <p>NOTE(review): @SuppressWarnings("fallthrough") is retained from the
 * upstream Hadoop implementation; the current body has no switch fallthrough.
 */
@SuppressWarnings("fallthrough")
@Override
public void update(byte[] b, int off, int len) {
    int localCrc = crc;
    while (len > 7) {
        // c0..c3 consume the four CRC register bytes, low to high. The embedded
        // ">>>= 8" assignments shift the register as a side effect, so after c2
        // localCrc == crc >>> 16; c3 then reads "localCrc >>> 8" (== crc >>> 24)
        // WITHOUT assigning, because localCrc is fully overwritten just below.
        final int c0 = (b[off] ^ localCrc) & 0xff;
        final int c1 = (b[off + 1] ^ (localCrc >>>= 8)) & 0xff;
        final int c2 = (b[off + 2] ^ (localCrc >>>= 8)) & 0xff;
        final int c3 = (b[off + 3] ^ localCrc >>> 8) & 0xff;
        localCrc =
                (T[T8_7_START + c0] ^ T[T8_6_START + c1])
                        ^ (T[T8_5_START + c2] ^ T[T8_4_START + c3]);
        // c4..c7 are pure data bytes (no register mixing needed at this point).
        final int c4 = b[off + 4] & 0xff;
        final int c5 = b[off + 5] & 0xff;
        final int c6 = b[off + 6] & 0xff;
        final int c7 = b[off + 7] & 0xff;
        localCrc ^=
                (T[T8_3_START + c4] ^ T[T8_2_START + c5])
                        ^ (T[T8_1_START + c6] ^ T[T8_0_START + c7]);
        off += 8;
        len -= 8;
    }
    /* loop unroll - duff's device style. */
    // Byte-at-a-time tail for the remaining len (< 8) bytes, using table 0 only.
    for (int i = 0; i < len; i++) {
        localCrc = (localCrc >>> 8) ^ T[T8_0_START + ((localCrc ^ b[off++]) & 0xff)];
    }
    // Publish crc out to object
    crc = localCrc;
}
/** Folds a single byte (the low 8 bits of {@code b}) into the CRC. */
@Override
public final void update(int b) {
    // Byte-at-a-time step: shift out the register's low byte and XOR in the
    // table entry selected by (crc ^ b) & 0xff.
    final int index = (crc ^ b) & 0xff;
    crc = (crc >>> 8) ^ T[T8_0_START + index];
}
// CRC polynomial tables generated by:
// java -cp build/test/classes/:build/classes/ \
//   org.apache.hadoop.util.TestPureJavaCrc32\$Table 82F63B78
// T holds eight 256-entry lookup tables laid out back to back; T8_n is the
// offset of the table used for the n-th byte of each 8-byte step in
// update(byte[], int, int).
private static final int T8_0_START = 0;
private static final int T8_1_START = 256;
private static final int T8_2_START = 2 * 256;
private static final int T8_3_START = 3 * 256;
private static final int T8_4_START = 4 * 256;
private static final int T8_5_START = 5 * 256;
private static final int T8_6_START = 6 * 256;
private static final int T8_7_START = 7 * 256;
private static final int[] T =
new int[] {
/* T8_0 */
0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4,
0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB,
0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B,
0x4D43CFD0, 0xBF284CD3, 0xAC78BF27, 0x5E133C24,
0x105EC76F, 0xE235446C, 0xF165B798, 0x030E349B,
0xD7C45070, 0x25AFD373, 0x36FF2087, 0xC494A384,
0x9A879FA0, 0x68EC1CA3, 0x7BBCEF57, 0x89D76C54,
0x5D1D08BF, 0xAF768BBC, 0xBC267848, 0x4E4DFB4B,
0x20BD8EDE, 0xD2D60DDD, 0xC186FE29, 0x33ED7D2A,
0xE72719C1, 0x154C9AC2, 0x061C6936, 0xF477EA35,
0xAA64D611, 0x580F5512, 0x4B5FA6E6, 0xB93425E5,
0x6DFE410E, 0x9F95C20D, 0x8CC531F9, 0x7EAEB2FA,
0x30E349B1, 0xC288CAB2, 0xD1D83946, 0x23B3BA45,
0xF779DEAE, 0x05125DAD, 0x1642AE59, 0xE4292D5A,
0xBA3A117E, 0x4851927D, 0x5B016189, 0xA96AE28A,
0x7DA08661, 0x8FCB0562, 0x9C9BF696, 0x6EF07595,
0x417B1DBC, 0xB3109EBF, 0xA0406D4B, 0x522BEE48,
0x86E18AA3, 0x748A09A0, 0x67DAFA54, 0x95B17957,
0xCBA24573, 0x39C9C670, 0x2A993584, 0xD8F2B687,
0x0C38D26C, 0xFE53516F, 0xED03A29B, 0x1F682198,
0x5125DAD3, 0xA34E59D0, 0xB01EAA24, 0x42752927,
0x96BF4DCC, 0x64D4CECF, 0x77843D3B, 0x85EFBE38,
0xDBFC821C, 0x2997011F, 0x3AC7F2EB, 0xC8AC71E8,
0x1C661503, 0xEE0D9600, 0xFD5D65F4, 0x0F36E6F7,
0x61C69362, 0x93AD1061, 0x80FDE395, 0x72966096,
0xA65C047D, 0x5437877E, 0x4767748A, 0xB50CF789,
0xEB1FCBAD, 0x197448AE, 0x0A24BB5A, 0xF84F3859,
0x2C855CB2, 0xDEEEDFB1, 0xCDBE2C45, 0x3FD5AF46,
0x7198540D, 0x83F3D70E, 0x90A324FA, 0x62C8A7F9,
0xB602C312, 0x44694011, 0x5739B3E5, 0xA55230E6,
0xFB410CC2, 0x092A8FC1, 0x1A7A7C35, 0xE811FF36,
0x3CDB9BDD, 0xCEB018DE, 0xDDE0EB2A, 0x2F8B6829,
0x82F63B78, 0x709DB87B, 0x63CD4B8F, 0x91A6C88C,
0x456CAC67, 0xB7072F64, 0xA457DC90, 0x563C5F93,
0x082F63B7, 0xFA44E0B4, 0xE9141340, 0x1B7F9043,
0xCFB5F4A8, 0x3DDE77AB, 0x2E8E845F, 0xDCE5075C,
0x92A8FC17, 0x60C37F14, 0x73938CE0, 0x81F80FE3,
0x55326B08, 0xA759E80B, 0xB4091BFF, 0x466298FC,
0x1871A4D8, 0xEA1A27DB, 0xF94AD42F, 0x0B21572C,
0xDFEB33C7, 0x2D80B0C4, 0x3ED04330, 0xCCBBC033,
0xA24BB5A6, 0x502036A5, 0x4370C551, 0xB11B4652,
0x65D122B9, 0x97BAA1BA, 0x84EA524E, 0x7681D14D,
0x2892ED69, 0xDAF96E6A, 0xC9A99D9E, 0x3BC21E9D,
0xEF087A76, 0x1D63F975, 0x0E330A81, 0xFC588982,
0xB21572C9, 0x407EF1CA, 0x532E023E, 0xA145813D,
0x758FE5D6, 0x87E466D5, 0x94B49521, 0x66DF1622,
0x38CC2A06, 0xCAA7A905, 0xD9F75AF1, 0x2B9CD9F2,
0xFF56BD19, 0x0D3D3E1A, 0x1E6DCDEE, 0xEC064EED,
0xC38D26C4, 0x31E6A5C7, 0x22B65633, 0xD0DDD530,
0x0417B1DB, 0xF67C32D8, 0xE52CC12C, 0x1747422F,
0x49547E0B, 0xBB3FFD08, 0xA86F0EFC, 0x5A048DFF,
0x8ECEE914, 0x7CA56A17, 0x6FF599E3, 0x9D9E1AE0,
0xD3D3E1AB, 0x21B862A8, 0x32E8915C, 0xC083125F,
0x144976B4, 0xE622F5B7, 0xF5720643, 0x07198540,
0x590AB964, 0xAB613A67, 0xB831C993, 0x4A5A4A90,
0x9E902E7B, 0x6CFBAD78, 0x7FAB5E8C, 0x8DC0DD8F,
0xE330A81A, 0x115B2B19, 0x020BD8ED, 0xF0605BEE,
0x24AA3F05, 0xD6C1BC06, 0xC5914FF2, 0x37FACCF1,
0x69E9F0D5, 0x9B8273D6, 0x88D28022, 0x7AB90321,
0xAE7367CA, 0x5C18E4C9, 0x4F48173D, 0xBD23943E,
0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81,
0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E,
0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E,
0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351,
/* T8_1 */
0x00000000, 0x13A29877, 0x274530EE, 0x34E7A899,
0x4E8A61DC, 0x5D28F9AB, 0x69CF5132, 0x7A6DC945,
0x9D14C3B8, 0x8EB65BCF, 0xBA51F356, 0xA9F36B21,
0xD39EA264, 0xC03C3A13, 0xF4DB928A, 0xE7790AFD,
0x3FC5F181, 0x2C6769F6, 0x1880C16F, 0x0B225918,
0x714F905D, 0x62ED082A, 0x560AA0B3, 0x45A838C4,
0xA2D13239, 0xB173AA4E, 0x859402D7, 0x96369AA0,
0xEC5B53E5, 0xFFF9CB92, 0xCB1E630B, 0xD8BCFB7C,
0x7F8BE302, 0x6C297B75, 0x58CED3EC, 0x4B6C4B9B,
0x310182DE, 0x22A31AA9, 0x1644B230, 0x05E62A47,
0xE29F20BA, 0xF13DB8CD, 0xC5DA1054, 0xD6788823,
0xAC154166, 0xBFB7D911, 0x8B507188, 0x98F2E9FF,
0x404E1283, 0x53EC8AF4, 0x670B226D, 0x74A9BA1A,
0x0EC4735F, 0x1D66EB28, 0x298143B1, 0x3A23DBC6,
0xDD5AD13B, 0xCEF8494C, 0xFA1FE1D5, 0xE9BD79A2,
0x93D0B0E7, 0x80722890, 0xB4958009, 0xA737187E,
0xFF17C604, 0xECB55E73, 0xD852F6EA, 0xCBF06E9D,
0xB19DA7D8, 0xA23F3FAF, 0x96D89736, 0x857A0F41,
0x620305BC, 0x71A19DCB, 0x45463552, 0x56E4AD25,
0x2C896460, 0x3F2BFC17, 0x0BCC548E, 0x186ECCF9,
0xC0D23785, 0xD370AFF2, 0xE797076B, 0xF4359F1C,
0x8E585659, 0x9DFACE2E, 0xA91D66B7, 0xBABFFEC0,
0x5DC6F43D, 0x4E646C4A, 0x7A83C4D3, 0x69215CA4,
0x134C95E1, 0x00EE0D96, 0x3409A50F, 0x27AB3D78,
0x809C2506, 0x933EBD71, 0xA7D915E8, 0xB47B8D9F,
0xCE1644DA, 0xDDB4DCAD, 0xE9537434, 0xFAF1EC43,
0x1D88E6BE, 0x0E2A7EC9, 0x3ACDD650, 0x296F4E27,
0x53028762, 0x40A01F15, 0x7447B78C, 0x67E52FFB,
0xBF59D487, 0xACFB4CF0, 0x981CE469, 0x8BBE7C1E,
0xF1D3B55B, 0xE2712D2C, 0xD69685B5, 0xC5341DC2,
0x224D173F, 0x31EF8F48, 0x050827D1, 0x16AABFA6,
0x6CC776E3, 0x7F65EE94, 0x4B82460D, 0x5820DE7A,
0xFBC3FAF9, 0xE861628E, 0xDC86CA17, 0xCF245260,
0xB5499B25, 0xA6EB0352, 0x920CABCB, 0x81AE33BC,
0x66D73941, 0x7575A136, 0x419209AF, 0x523091D8,
0x285D589D, 0x3BFFC0EA, 0x0F186873, 0x1CBAF004,
0xC4060B78, 0xD7A4930F, 0xE3433B96, 0xF0E1A3E1,
0x8A8C6AA4, 0x992EF2D3, 0xADC95A4A, 0xBE6BC23D,
0x5912C8C0, 0x4AB050B7, 0x7E57F82E, 0x6DF56059,
0x1798A91C, 0x043A316B, 0x30DD99F2, 0x237F0185,
0x844819FB, 0x97EA818C, 0xA30D2915, 0xB0AFB162,
0xCAC27827, 0xD960E050, 0xED8748C9, 0xFE25D0BE,
0x195CDA43, 0x0AFE4234, 0x3E19EAAD, 0x2DBB72DA,
0x57D6BB9F, 0x447423E8, 0x70938B71, 0x63311306,
0xBB8DE87A, 0xA82F700D, 0x9CC8D894, 0x8F6A40E3,
0xF50789A6, 0xE6A511D1, 0xD242B948, 0xC1E0213F,
0x26992BC2, 0x353BB3B5, 0x01DC1B2C, 0x127E835B,
0x68134A1E, 0x7BB1D269, 0x4F567AF0, 0x5CF4E287,
0x04D43CFD, 0x1776A48A, 0x23910C13, 0x30339464,
0x4A5E5D21, 0x59FCC556, 0x6D1B6DCF, 0x7EB9F5B8,
0x99C0FF45, 0x8A626732, 0xBE85CFAB, 0xAD2757DC,
0xD74A9E99, 0xC4E806EE, 0xF00FAE77, 0xE3AD3600,
0x3B11CD7C, 0x28B3550B, 0x1C54FD92, 0x0FF665E5,
0x759BACA0, 0x663934D7, 0x52DE9C4E, 0x417C0439,
0xA6050EC4, 0xB5A796B3, 0x81403E2A, 0x92E2A65D,
0xE88F6F18, 0xFB2DF76F, 0xCFCA5FF6, 0xDC68C781,
0x7B5FDFFF, 0x68FD4788, 0x5C1AEF11, 0x4FB87766,
0x35D5BE23, 0x26772654, 0x12908ECD, 0x013216BA,
0xE64B1C47, 0xF5E98430, 0xC10E2CA9, 0xD2ACB4DE,
0xA8C17D9B, 0xBB63E5EC, 0x8F844D75, 0x9C26D502,
0x449A2E7E, 0x5738B609, 0x63DF1E90, 0x707D86E7,
0x0A104FA2, 0x19B2D7D5, 0x2D557F4C, 0x3EF7E73B,
0xD98EEDC6, 0xCA2C75B1, 0xFECBDD28, 0xED69455F,
0x97048C1A, 0x84A6146D, 0xB041BCF4, 0xA3E32483,
/* T8_2 */
0x00000000, 0xA541927E, 0x4F6F520D, 0xEA2EC073,
0x9EDEA41A, 0x3B9F3664, 0xD1B1F617, 0x74F06469,
0x38513EC5, 0x9D10ACBB, 0x773E6CC8, 0xD27FFEB6,
0xA68F9ADF, 0x03CE08A1, 0xE9E0C8D2, 0x4CA15AAC,
0x70A27D8A, 0xD5E3EFF4, 0x3FCD2F87, 0x9A8CBDF9,
0xEE7CD990, 0x4B3D4BEE, 0xA1138B9D, 0x045219E3,
0x48F3434F, 0xEDB2D131, 0x079C1142, 0xA2DD833C,
0xD62DE755, 0x736C752B, 0x9942B558, 0x3C032726,
0xE144FB14, 0x4405696A, 0xAE2BA919, 0x0B6A3B67,
0x7F9A5F0E, 0xDADBCD70, 0x30F50D03, 0x95B49F7D,
0xD915C5D1, 0x7C5457AF, 0x967A97DC, 0x333B05A2,
0x47CB61CB, 0xE28AF3B5, 0x08A433C6, 0xADE5A1B8,
0x91E6869E, 0x34A714E0, 0xDE89D493, 0x7BC846ED,
0x0F382284, 0xAA79B0FA, 0x40577089, 0xE516E2F7,
0xA9B7B85B, 0x0CF62A25, 0xE6D8EA56, 0x43997828,
0x37691C41, 0x92288E3F, 0x78064E4C, 0xDD47DC32,
0xC76580D9, 0x622412A7, 0x880AD2D4, 0x2D4B40AA,
0x59BB24C3, 0xFCFAB6BD, 0x16D476CE, 0xB395E4B0,
0xFF34BE1C, 0x5A752C62, 0xB05BEC11, 0x151A7E6F,
0x61EA1A06, 0xC4AB8878, 0x2E85480B, 0x8BC4DA75,
0xB7C7FD53, 0x12866F2D, 0xF8A8AF5E, 0x5DE93D20,
0x29195949, 0x8C58CB37, 0x66760B44, 0xC337993A,
0x8F96C396, 0x2AD751E8, 0xC0F9919B, 0x65B803E5,
0x1148678C, 0xB409F5F2, 0x5E273581, 0xFB66A7FF,
0x26217BCD, 0x8360E9B3, 0x694E29C0, 0xCC0FBBBE,
0xB8FFDFD7, 0x1DBE4DA9, 0xF7908DDA, 0x52D11FA4,
0x1E704508, 0xBB31D776, 0x511F1705, 0xF45E857B,
0x80AEE112, 0x25EF736C, 0xCFC1B31F, 0x6A802161,
0x56830647, 0xF3C29439, 0x19EC544A, 0xBCADC634,
0xC85DA25D, 0x6D1C3023, 0x8732F050, 0x2273622E,
0x6ED23882, 0xCB93AAFC, 0x21BD6A8F, 0x84FCF8F1,
0xF00C9C98, 0x554D0EE6, 0xBF63CE95, 0x1A225CEB,
0x8B277743, 0x2E66E53D, 0xC448254E, 0x6109B730,
0x15F9D359, 0xB0B84127, 0x5A968154, 0xFFD7132A,
0xB3764986, 0x1637DBF8, 0xFC191B8B, 0x595889F5,
0x2DA8ED9C, 0x88E97FE2, 0x62C7BF91, 0xC7862DEF,
0xFB850AC9, 0x5EC498B7, 0xB4EA58C4, 0x11ABCABA,
0x655BAED3, 0xC01A3CAD, 0x2A34FCDE, 0x8F756EA0,
0xC3D4340C, 0x6695A672, 0x8CBB6601, 0x29FAF47F,
0x5D0A9016, 0xF84B0268, 0x1265C21B, 0xB7245065,
0x6A638C57, 0xCF221E29, 0x250CDE5A, 0x804D4C24,
0xF4BD284D, 0x51FCBA33, 0xBBD27A40, 0x1E93E83E,
0x5232B292, 0xF77320EC, 0x1D5DE09F, 0xB81C72E1,
0xCCEC1688, 0x69AD84F6, 0x83834485, 0x26C2D6FB,
0x1AC1F1DD, 0xBF8063A3, 0x55AEA3D0, 0xF0EF31AE,
0x841F55C7, 0x215EC7B9, 0xCB7007CA, 0x6E3195B4,
0x2290CF18, 0x87D15D66, 0x6DFF9D15, 0xC8BE0F6B,
0xBC4E6B02, 0x190FF97C, 0xF321390F, 0x5660AB71,
0x4C42F79A, 0xE90365E4, 0x032DA597, 0xA66C37E9,
0xD29C5380, 0x77DDC1FE, 0x9DF3018D, 0x38B293F3,
0x7413C95F, 0xD1525B21, 0x3B7C9B52, 0x9E3D092C,
0xEACD6D45, 0x4F8CFF3B, 0xA5A23F48, 0x00E3AD36,
0x3CE08A10, 0x99A1186E, 0x738FD81D, 0xD6CE4A63,
0xA23E2E0A, 0x077FBC74, 0xED517C07, 0x4810EE79,
0x04B1B4D5, 0xA1F026AB, 0x4BDEE6D8, 0xEE9F74A6,
0x9A6F10CF, 0x3F2E82B1, 0xD50042C2, 0x7041D0BC,
0xAD060C8E, 0x08479EF0, 0xE2695E83, 0x4728CCFD,
0x33D8A894, 0x96993AEA, 0x7CB7FA99, 0xD9F668E7,
0x9557324B, 0x3016A035, 0xDA386046, 0x7F79F238,
0x0B899651, 0xAEC8042F, 0x44E6C45C, 0xE1A75622,
0xDDA47104, 0x78E5E37A, 0x92CB2309, 0x378AB177,
0x437AD51E, 0xE63B4760, 0x0C158713, 0xA954156D,
0xE5F54FC1, 0x40B4DDBF, 0xAA9A1DCC, 0x0FDB8FB2,
0x7B2BEBDB, 0xDE6A79A5, 0x3444B9D6, 0x91052BA8,
/* T8_3 */
0x00000000, 0xDD45AAB8, 0xBF672381, 0x62228939,
0x7B2231F3, 0xA6679B4B, 0xC4451272, 0x1900B8CA,
0xF64463E6, 0x2B01C95E, 0x49234067, 0x9466EADF,
0x8D665215, 0x5023F8AD, 0x32017194, 0xEF44DB2C,
0xE964B13D, 0x34211B85, 0x560392BC, 0x8B463804,
0x924680CE, 0x4F032A76, 0x2D21A34F, 0xF06409F7,
0x1F20D2DB, 0xC2657863, 0xA047F15A, 0x7D025BE2,
0x6402E328, 0xB9474990, 0xDB65C0A9, 0x06206A11,
0xD725148B, 0x0A60BE33, 0x6842370A, 0xB5079DB2,
0xAC072578, 0x71428FC0, 0x136006F9, 0xCE25AC41,
0x2161776D, 0xFC24DDD5, 0x9E0654EC, 0x4343FE54,
0x5A43469E, 0x8706EC26, 0xE524651F, 0x3861CFA7,
0x3E41A5B6, 0xE3040F0E, 0x81268637, 0x5C632C8F,
0x45639445, 0x98263EFD, 0xFA04B7C4, 0x27411D7C,
0xC805C650, 0x15406CE8, 0x7762E5D1, 0xAA274F69,
0xB327F7A3, 0x6E625D1B, 0x0C40D422, 0xD1057E9A,
0xABA65FE7, 0x76E3F55F, 0x14C17C66, 0xC984D6DE,
0xD0846E14, 0x0DC1C4AC, 0x6FE34D95, 0xB2A6E72D,
0x5DE23C01, 0x80A796B9, 0xE2851F80, 0x3FC0B538,
0x26C00DF2, 0xFB85A74A, 0x99A72E73, 0x44E284CB,
0x42C2EEDA, 0x9F874462, 0xFDA5CD5B, 0x20E067E3,
0x39E0DF29, 0xE4A57591, 0x8687FCA8, 0x5BC25610,
0xB4868D3C, 0x69C32784, 0x0BE1AEBD, 0xD6A40405,
0xCFA4BCCF, 0x12E11677, 0x70C39F4E, 0xAD8635F6,
0x7C834B6C, 0xA1C6E1D4, 0xC3E468ED, 0x1EA1C255,
0x07A17A9F, 0xDAE4D027, 0xB8C6591E, 0x6583F3A6,
0x8AC7288A, 0x57828232, 0x35A00B0B, 0xE8E5A1B3,
0xF1E51979, 0x2CA0B3C1, 0x4E823AF8, 0x93C79040,
0x95E7FA51, 0x48A250E9, 0x2A80D9D0, 0xF7C57368,
0xEEC5CBA2, 0x3380611A, 0x51A2E823, 0x8CE7429B,
0x63A399B7, 0xBEE6330F, 0xDCC4BA36, 0x0181108E,
0x1881A844, 0xC5C402FC, 0xA7E68BC5, 0x7AA3217D,
0x52A0C93F, 0x8FE56387, 0xEDC7EABE, 0x30824006,
0x2982F8CC, 0xF4C75274, 0x96E5DB4D, 0x4BA071F5,
0xA4E4AAD9, 0x79A10061, 0x1B838958, 0xC6C623E0,
0xDFC69B2A, 0x02833192, 0x60A1B8AB, 0xBDE41213,
0xBBC47802, 0x6681D2BA, 0x04A35B83, 0xD9E6F13B,
0xC0E649F1, 0x1DA3E349, 0x7F816A70, 0xA2C4C0C8,
0x4D801BE4, 0x90C5B15C, 0xF2E73865, 0x2FA292DD,
0x36A22A17, 0xEBE780AF, 0x89C50996, 0x5480A32E,
0x8585DDB4, 0x58C0770C, 0x3AE2FE35, 0xE7A7548D,
0xFEA7EC47, 0x23E246FF, 0x41C0CFC6, 0x9C85657E,
0x73C1BE52, 0xAE8414EA, 0xCCA69DD3, 0x11E3376B,
0x08E38FA1, 0xD5A62519, 0xB784AC20, 0x6AC10698,
0x6CE16C89, 0xB1A4C631, 0xD3864F08, 0x0EC3E5B0,
0x17C35D7A, 0xCA86F7C2, 0xA8A47EFB, 0x75E1D443,
0x9AA50F6F, 0x47E0A5D7, 0x25C22CEE, 0xF8878656,
0xE1873E9C, 0x3CC29424, 0x5EE01D1D, 0x83A5B7A5,
0xF90696D8, 0x24433C60, 0x4661B559, 0x9B241FE1,
0x8224A72B, 0x5F610D93, 0x3D4384AA, 0xE0062E12,
0x0F42F53E, 0xD2075F86, 0xB025D6BF, 0x6D607C07,
0x7460C4CD, 0xA9256E75, 0xCB07E74C, 0x16424DF4,
0x106227E5, 0xCD278D5D, 0xAF050464, 0x7240AEDC,
0x6B401616, 0xB605BCAE, 0xD4273597, 0x09629F2F,
0xE6264403, 0x3B63EEBB, 0x59416782, 0x8404CD3A,
0x9D0475F0, 0x4041DF48, 0x22635671, 0xFF26FCC9,
0x2E238253, 0xF36628EB, 0x9144A1D2, 0x4C010B6A,
0x5501B3A0, 0x88441918, 0xEA669021, 0x37233A99,
0xD867E1B5, 0x05224B0D, 0x6700C234, 0xBA45688C,
0xA345D046, 0x7E007AFE, 0x1C22F3C7, 0xC167597F,
0xC747336E, 0x1A0299D6, 0x782010EF, 0xA565BA57,
0xBC65029D, 0x6120A825, 0x0302211C, 0xDE478BA4,
0x31035088, 0xEC46FA30, 0x8E647309, 0x5321D9B1,
0x4A21617B, 0x9764CBC3, 0xF54642FA, 0x2803E842,
/* T8_4 */
0x00000000, 0x38116FAC, 0x7022DF58, 0x4833B0F4,
0xE045BEB0, 0xD854D11C, 0x906761E8, 0xA8760E44,
0xC5670B91, 0xFD76643D, 0xB545D4C9, 0x8D54BB65,
0x2522B521, 0x1D33DA8D, 0x55006A79, 0x6D1105D5,
0x8F2261D3, 0xB7330E7F, 0xFF00BE8B, 0xC711D127,
0x6F67DF63, 0x5776B0CF, 0x1F45003B, 0x27546F97,
0x4A456A42, 0x725405EE, 0x3A67B51A, 0x0276DAB6,
0xAA00D4F2, 0x9211BB5E, 0xDA220BAA, 0xE2336406,
0x1BA8B557, 0x23B9DAFB, 0x6B8A6A0F, 0x539B05A3,
0xFBED0BE7, 0xC3FC644B, 0x8BCFD4BF, 0xB3DEBB13,
0xDECFBEC6, 0xE6DED16A, 0xAEED619E, 0x96FC0E32,
0x3E8A0076, 0x069B6FDA, 0x4EA8DF2E, 0x76B9B082,
0x948AD484, 0xAC9BBB28, 0xE4A80BDC, 0xDCB96470,
0x74CF6A34, 0x4CDE0598, 0x04EDB56C, 0x3CFCDAC0,
0x51EDDF15, 0x69FCB0B9, 0x21CF004D, 0x19DE6FE1,
0xB1A861A5, 0x89B90E09, 0xC18ABEFD, 0xF99BD151,
0x37516AAE, 0x0F400502, 0x4773B5F6, 0x7F62DA5A,
0xD714D41E, 0xEF05BBB2, 0xA7360B46, 0x9F2764EA,
0xF236613F, 0xCA270E93, 0x8214BE67, 0xBA05D1CB,
0x1273DF8F, 0x2A62B023, 0x625100D7, 0x5A406F7B,
0xB8730B7D, 0x806264D1, 0xC851D425, 0xF040BB89,
0x5836B5CD, 0x6027DA61, 0x28146A95, 0x10050539,
0x7D1400EC, 0x45056F40, 0x0D36DFB4, 0x3527B018,
0x9D51BE5C, 0xA540D1F0, 0xED736104, 0xD5620EA8,
0x2CF9DFF9, 0x14E8B055, 0x5CDB00A1, 0x64CA6F0D,
0xCCBC6149, 0xF4AD0EE5, 0xBC9EBE11, 0x848FD1BD,
0xE99ED468, 0xD18FBBC4, 0x99BC0B30, 0xA1AD649C,
0x09DB6AD8, 0x31CA0574, 0x79F9B580, 0x41E8DA2C,
0xA3DBBE2A, 0x9BCAD186, 0xD3F96172, 0xEBE80EDE,
0x439E009A, 0x7B8F6F36, 0x33BCDFC2, 0x0BADB06E,
0x66BCB5BB, 0x5EADDA17, 0x169E6AE3, 0x2E8F054F,
0x86F90B0B, 0xBEE864A7, 0xF6DBD453, 0xCECABBFF,
0x6EA2D55C, 0x56B3BAF0, 0x1E800A04, 0x269165A8,
0x8EE76BEC, 0xB6F60440, 0xFEC5B4B4, 0xC6D4DB18,
0xABC5DECD, 0x93D4B161, 0xDBE70195, 0xE3F66E39,
0x4B80607D, 0x73910FD1, 0x3BA2BF25, 0x03B3D089,
0xE180B48F, 0xD991DB23, 0x91A26BD7, 0xA9B3047B,
0x01C50A3F, 0x39D46593, 0x71E7D567, 0x49F6BACB,
0x24E7BF1E, 0x1CF6D0B2, 0x54C56046, 0x6CD40FEA,
0xC4A201AE, 0xFCB36E02, 0xB480DEF6, 0x8C91B15A,
0x750A600B, 0x4D1B0FA7, 0x0528BF53, 0x3D39D0FF,
0x954FDEBB, 0xAD5EB117, 0xE56D01E3, 0xDD7C6E4F,
0xB06D6B9A, 0x887C0436, 0xC04FB4C2, 0xF85EDB6E,
0x5028D52A, 0x6839BA86, 0x200A0A72, 0x181B65DE,
0xFA2801D8, 0xC2396E74, 0x8A0ADE80, 0xB21BB12C,
0x1A6DBF68, 0x227CD0C4, 0x6A4F6030, 0x525E0F9C,
0x3F4F0A49, 0x075E65E5, 0x4F6DD511, 0x777CBABD,
0xDF0AB4F9, 0xE71BDB55, 0xAF286BA1, 0x9739040D,
0x59F3BFF2, 0x61E2D05E, 0x29D160AA, 0x11C00F06,
0xB9B60142, 0x81A76EEE, 0xC994DE1A, 0xF185B1B6,
0x9C94B463, 0xA485DBCF, 0xECB66B3B, 0xD4A70497,
0x7CD10AD3, 0x44C0657F, 0x0CF3D58B, 0x34E2BA27,
0xD6D1DE21, 0xEEC0B18D, 0xA6F30179, 0x9EE26ED5,
0x36946091, 0x0E850F3D, 0x46B6BFC9, 0x7EA7D065,
0x13B6D5B0, 0x2BA7BA1C, 0x63940AE8, 0x5B856544,
0xF3F36B00, 0xCBE204AC, 0x83D1B458, 0xBBC0DBF4,
0x425B0AA5, 0x7A4A6509, 0x3279D5FD, 0x0A68BA51,
0xA21EB415, 0x9A0FDBB9, 0xD23C6B4D, 0xEA2D04E1,
0x873C0134, 0xBF2D6E98, 0xF71EDE6C, 0xCF0FB1C0,
0x6779BF84, 0x5F68D028, 0x175B60DC, 0x2F4A0F70,
0xCD796B76, 0xF56804DA, 0xBD5BB42E, 0x854ADB82,
0x2D3CD5C6, 0x152DBA6A, 0x5D1E0A9E, 0x650F6532,
0x081E60E7, 0x300F0F4B, 0x783CBFBF, 0x402DD013,
0xE85BDE57, 0xD04AB1FB, 0x9879010F, 0xA0686EA3,
/* T8_5 */
0x00000000, 0xEF306B19, 0xDB8CA0C3, 0x34BCCBDA,
0xB2F53777, 0x5DC55C6E, 0x697997B4, 0x8649FCAD,
0x6006181F, 0x8F367306, 0xBB8AB8DC, 0x54BAD3C5,
0xD2F32F68, 0x3DC34471, 0x097F8FAB, 0xE64FE4B2,
0xC00C303E, 0x2F3C5B27, 0x1B8090FD, 0xF4B0FBE4,
0x72F90749, 0x9DC96C50, 0xA975A78A, 0x4645CC93,
0xA00A2821, 0x4F3A4338, 0x7B8688E2, 0x94B6E3FB,
0x12FF1F56, 0xFDCF744F, 0xC973BF95, 0x2643D48C,
0x85F4168D, 0x6AC47D94, 0x5E78B64E, 0xB148DD57,
0x370121FA, 0xD8314AE3, 0xEC8D8139, 0x03BDEA20,
0xE5F20E92, 0x0AC2658B, 0x3E7EAE51, 0xD14EC548,
0x570739E5, 0xB83752FC, 0x8C8B9926, 0x63BBF23F,
0x45F826B3, 0xAAC84DAA, 0x9E748670, 0x7144ED69,
0xF70D11C4, 0x183D7ADD, 0x2C81B107, 0xC3B1DA1E,
0x25FE3EAC, 0xCACE55B5, 0xFE729E6F, 0x1142F576,
0x970B09DB, 0x783B62C2, 0x4C87A918, 0xA3B7C201,
0x0E045BEB, 0xE13430F2, 0xD588FB28, 0x3AB89031,
0xBCF16C9C, 0x53C10785, 0x677DCC5F, 0x884DA746,
0x6E0243F4, 0x813228ED, 0xB58EE337, 0x5ABE882E,
0xDCF77483, 0x33C71F9A, 0x077BD440, 0xE84BBF59,
0xCE086BD5, 0x213800CC, 0x1584CB16, 0xFAB4A00F,
0x7CFD5CA2, 0x93CD37BB, 0xA771FC61, 0x48419778,
0xAE0E73CA, 0x413E18D3, 0x7582D309, 0x9AB2B810,
0x1CFB44BD, 0xF3CB2FA4, 0xC777E47E, 0x28478F67,
0x8BF04D66, 0x64C0267F, 0x507CEDA5, 0xBF4C86BC,
0x39057A11, 0xD6351108, 0xE289DAD2, 0x0DB9B1CB,
0xEBF65579, 0x04C63E60, 0x307AF5BA, 0xDF4A9EA3,
0x5903620E, 0xB6330917, 0x828FC2CD, 0x6DBFA9D4,
0x4BFC7D58, 0xA4CC1641, 0x9070DD9B, 0x7F40B682,
0xF9094A2F, 0x16392136, 0x2285EAEC, 0xCDB581F5,
0x2BFA6547, 0xC4CA0E5E, 0xF076C584, 0x1F46AE9D,
0x990F5230, 0x763F3929, 0x4283F2F3, 0xADB399EA,
0x1C08B7D6, 0xF338DCCF, 0xC7841715, 0x28B47C0C,
0xAEFD80A1, 0x41CDEBB8, 0x75712062, 0x9A414B7B,
0x7C0EAFC9, 0x933EC4D0, 0xA7820F0A, 0x48B26413,
0xCEFB98BE, 0x21CBF3A7, 0x1577387D, 0xFA475364,
0xDC0487E8, 0x3334ECF1, 0x0788272B, 0xE8B84C32,
0x6EF1B09F, 0x81C1DB86, 0xB57D105C, 0x5A4D7B45,
0xBC029FF7, 0x5332F4EE, 0x678E3F34, 0x88BE542D,
0x0EF7A880, 0xE1C7C399, 0xD57B0843, 0x3A4B635A,
0x99FCA15B, 0x76CCCA42, 0x42700198, 0xAD406A81,
0x2B09962C, 0xC439FD35, 0xF08536EF, 0x1FB55DF6,
0xF9FAB944, 0x16CAD25D, 0x22761987, 0xCD46729E,
0x4B0F8E33, 0xA43FE52A, 0x90832EF0, 0x7FB345E9,
0x59F09165, 0xB6C0FA7C, 0x827C31A6, 0x6D4C5ABF,
0xEB05A612, 0x0435CD0B, 0x308906D1, 0xDFB96DC8,
0x39F6897A, 0xD6C6E263, 0xE27A29B9, 0x0D4A42A0,
0x8B03BE0D, 0x6433D514, 0x508F1ECE, 0xBFBF75D7,
0x120CEC3D, 0xFD3C8724, 0xC9804CFE, 0x26B027E7,
0xA0F9DB4A, 0x4FC9B053, 0x7B757B89, 0x94451090,
0x720AF422, 0x9D3A9F3B, 0xA98654E1, 0x46B63FF8,
0xC0FFC355, 0x2FCFA84C, 0x1B736396, 0xF443088F,
0xD200DC03, 0x3D30B71A, 0x098C7CC0, 0xE6BC17D9,
0x60F5EB74, 0x8FC5806D, 0xBB794BB7, 0x544920AE,
0xB206C41C, 0x5D36AF05, 0x698A64DF, 0x86BA0FC6,
0x00F3F36B, 0xEFC39872, 0xDB7F53A8, 0x344F38B1,
0x97F8FAB0, 0x78C891A9, 0x4C745A73, 0xA344316A,
0x250DCDC7, 0xCA3DA6DE, 0xFE816D04, 0x11B1061D,
0xF7FEE2AF, 0x18CE89B6, 0x2C72426C, 0xC3422975,
0x450BD5D8, 0xAA3BBEC1, 0x9E87751B, 0x71B71E02,
0x57F4CA8E, 0xB8C4A197, 0x8C786A4D, 0x63480154,
0xE501FDF9, 0x0A3196E0, 0x3E8D5D3A, 0xD1BD3623,
0x37F2D291, 0xD8C2B988, 0xEC7E7252, 0x034E194B,
0x8507E5E6, 0x6A378EFF, 0x5E8B4525, 0xB1BB2E3C,
/* T8_6 */
0x00000000, 0x68032CC8, 0xD0065990, 0xB8057558,
0xA5E0C5D1, 0xCDE3E919, 0x75E69C41, 0x1DE5B089,
0x4E2DFD53, 0x262ED19B, 0x9E2BA4C3, 0xF628880B,
0xEBCD3882, 0x83CE144A, 0x3BCB6112, 0x53C84DDA,
0x9C5BFAA6, 0xF458D66E, 0x4C5DA336, 0x245E8FFE,
0x39BB3F77, 0x51B813BF, 0xE9BD66E7, 0x81BE4A2F,
0xD27607F5, 0xBA752B3D, 0x02705E65, 0x6A7372AD,
0x7796C224, 0x1F95EEEC, 0xA7909BB4, 0xCF93B77C,
0x3D5B83BD, 0x5558AF75, 0xED5DDA2D, 0x855EF6E5,
0x98BB466C, 0xF0B86AA4, 0x48BD1FFC, 0x20BE3334,
0x73767EEE, 0x1B755226, 0xA370277E, 0xCB730BB6,
0xD696BB3F, 0xBE9597F7, 0x0690E2AF, 0x6E93CE67,
0xA100791B, 0xC90355D3, 0x7106208B, 0x19050C43,
0x04E0BCCA, 0x6CE39002, 0xD4E6E55A, 0xBCE5C992,
0xEF2D8448, 0x872EA880, 0x3F2BDDD8, 0x5728F110,
0x4ACD4199, 0x22CE6D51, 0x9ACB1809, 0xF2C834C1,
0x7AB7077A, 0x12B42BB2, 0xAAB15EEA, 0xC2B27222,
0xDF57C2AB, 0xB754EE63, 0x0F519B3B, 0x6752B7F3,
0x349AFA29, 0x5C99D6E1, 0xE49CA3B9, 0x8C9F8F71,
0x917A3FF8, 0xF9791330, 0x417C6668, 0x297F4AA0,
0xE6ECFDDC, 0x8EEFD114, 0x36EAA44C, 0x5EE98884,
0x430C380D, 0x2B0F14C5, 0x930A619D, 0xFB094D55,
0xA8C1008F, 0xC0C22C47, 0x78C7591F, 0x10C475D7,
0x0D21C55E, 0x6522E996, 0xDD279CCE, 0xB524B006,
0x47EC84C7, 0x2FEFA80F, 0x97EADD57, 0xFFE9F19F,
0xE20C4116, 0x8A0F6DDE, 0x320A1886, 0x5A09344E,
0x09C17994, 0x61C2555C, 0xD9C72004, 0xB1C40CCC,
0xAC21BC45, 0xC422908D, 0x7C27E5D5, 0x1424C91D,
0xDBB77E61, 0xB3B452A9, 0x0BB127F1, 0x63B20B39,
0x7E57BBB0, 0x16549778, 0xAE51E220, 0xC652CEE8,
0x959A8332, 0xFD99AFFA, 0x459CDAA2, 0x2D9FF66A,
0x307A46E3, 0x58796A2B, 0xE07C1F73, 0x887F33BB,
0xF56E0EF4, 0x9D6D223C, 0x25685764, 0x4D6B7BAC,
0x508ECB25, 0x388DE7ED, 0x808892B5, 0xE88BBE7D,
0xBB43F3A7, 0xD340DF6F, 0x6B45AA37, 0x034686FF,
0x1EA33676, 0x76A01ABE, 0xCEA56FE6, 0xA6A6432E,
0x6935F452, 0x0136D89A, 0xB933ADC2, 0xD130810A,
0xCCD53183, 0xA4D61D4B, 0x1CD36813, 0x74D044DB,
0x27180901, 0x4F1B25C9, 0xF71E5091, 0x9F1D7C59,
0x82F8CCD0, 0xEAFBE018, 0x52FE9540, 0x3AFDB988,
0xC8358D49, 0xA036A181, 0x1833D4D9, 0x7030F811,
0x6DD54898, 0x05D66450, 0xBDD31108, 0xD5D03DC0,
0x8618701A, 0xEE1B5CD2, 0x561E298A, 0x3E1D0542,
0x23F8B5CB, 0x4BFB9903, 0xF3FEEC5B, 0x9BFDC093,
0x546E77EF, 0x3C6D5B27, 0x84682E7F, 0xEC6B02B7,
0xF18EB23E, 0x998D9EF6, 0x2188EBAE, 0x498BC766,
0x1A438ABC, 0x7240A674, 0xCA45D32C, 0xA246FFE4,
0xBFA34F6D, 0xD7A063A5, 0x6FA516FD, 0x07A63A35,
0x8FD9098E, 0xE7DA2546, 0x5FDF501E, 0x37DC7CD6,
0x2A39CC5F, 0x423AE097, 0xFA3F95CF, 0x923CB907,
0xC1F4F4DD, 0xA9F7D815, 0x11F2AD4D, 0x79F18185,
0x6414310C, 0x0C171DC4, 0xB412689C, 0xDC114454,
0x1382F328, 0x7B81DFE0, 0xC384AAB8, 0xAB878670,
0xB66236F9, 0xDE611A31, 0x66646F69, 0x0E6743A1,
0x5DAF0E7B, 0x35AC22B3, 0x8DA957EB, 0xE5AA7B23,
0xF84FCBAA, 0x904CE762, 0x2849923A, 0x404ABEF2,
0xB2828A33, 0xDA81A6FB, 0x6284D3A3, 0x0A87FF6B,
0x17624FE2, 0x7F61632A, 0xC7641672, 0xAF673ABA,
0xFCAF7760, 0x94AC5BA8, 0x2CA92EF0, 0x44AA0238,
0x594FB2B1, 0x314C9E79, 0x8949EB21, 0xE14AC7E9,
0x2ED97095, 0x46DA5C5D, 0xFEDF2905, 0x96DC05CD,
0x8B39B544, 0xE33A998C, 0x5B3FECD4, 0x333CC01C,
0x60F48DC6, 0x08F7A10E, 0xB0F2D456, 0xD8F1F89E,
0xC5144817, 0xAD1764DF, 0x15121187, 0x7D113D4F,
/* T8_7 */
0x00000000, 0x493C7D27, 0x9278FA4E, 0xDB448769,
0x211D826D, 0x6821FF4A, 0xB3657823, 0xFA590504,
0x423B04DA, 0x0B0779FD, 0xD043FE94, 0x997F83B3,
0x632686B7, 0x2A1AFB90, 0xF15E7CF9, 0xB86201DE,
0x847609B4, 0xCD4A7493, 0x160EF3FA, 0x5F328EDD,
0xA56B8BD9, 0xEC57F6FE, 0x37137197, 0x7E2F0CB0,
0xC64D0D6E, 0x8F717049, 0x5435F720, 0x1D098A07,
0xE7508F03, 0xAE6CF224, 0x7528754D, 0x3C14086A,
0x0D006599, 0x443C18BE, 0x9F789FD7, 0xD644E2F0,
0x2C1DE7F4, 0x65219AD3, 0xBE651DBA, 0xF759609D,
0x4F3B6143, 0x06071C64, 0xDD439B0D, 0x947FE62A,
0x6E26E32E, 0x271A9E09, 0xFC5E1960, 0xB5626447,
0x89766C2D, 0xC04A110A, 0x1B0E9663, 0x5232EB44,
0xA86BEE40, 0xE1579367, 0x3A13140E, 0x732F6929,
0xCB4D68F7, 0x827115D0, 0x593592B9, 0x1009EF9E,
0xEA50EA9A, 0xA36C97BD, 0x782810D4, 0x31146DF3,
0x1A00CB32, 0x533CB615, 0x8878317C, 0xC1444C5B,
0x3B1D495F, 0x72213478, 0xA965B311, 0xE059CE36,
0x583BCFE8, 0x1107B2CF, 0xCA4335A6, 0x837F4881,
0x79264D85, 0x301A30A2, 0xEB5EB7CB, 0xA262CAEC,
0x9E76C286, 0xD74ABFA1, 0x0C0E38C8, 0x453245EF,
0xBF6B40EB, 0xF6573DCC, 0x2D13BAA5, 0x642FC782,
0xDC4DC65C, 0x9571BB7B, 0x4E353C12, 0x07094135,
0xFD504431, 0xB46C3916, 0x6F28BE7F, 0x2614C358,
0x1700AEAB, 0x5E3CD38C, 0x857854E5, 0xCC4429C2,
0x361D2CC6, 0x7F2151E1, 0xA465D688, 0xED59ABAF,
0x553BAA71, 0x1C07D756, 0xC743503F, 0x8E7F2D18,
0x7426281C, 0x3D1A553B, 0xE65ED252, 0xAF62AF75,
0x9376A71F, 0xDA4ADA38, 0x010E5D51, 0x48322076,
0xB26B2572, 0xFB575855, 0x2013DF3C, 0x692FA21B,
0xD14DA3C5, 0x9871DEE2, 0x4335598B, 0x0A0924AC,
0xF05021A8, 0xB96C5C8F, 0x6228DBE6, 0x2B14A6C1,
0x34019664, 0x7D3DEB43, 0xA6796C2A, 0xEF45110D,
0x151C1409, 0x5C20692E, 0x8764EE47, 0xCE589360,
0x763A92BE, 0x3F06EF99, 0xE44268F0, 0xAD7E15D7,
0x572710D3, 0x1E1B6DF4, 0xC55FEA9D, 0x8C6397BA,
0xB0779FD0, 0xF94BE2F7, 0x220F659E, 0x6B3318B9,
0x916A1DBD, 0xD856609A, 0x0312E7F3, 0x4A2E9AD4,
0xF24C9B0A, 0xBB70E62D, 0x60346144, 0x29081C63,
0xD3511967, 0x9A6D6440, 0x4129E329, 0x08159E0E,
0x3901F3FD, 0x703D8EDA, 0xAB7909B3, 0xE2457494,
0x181C7190, 0x51200CB7, 0x8A648BDE, 0xC358F6F9,
0x7B3AF727, 0x32068A00, 0xE9420D69, 0xA07E704E,
0x5A27754A, 0x131B086D, 0xC85F8F04, 0x8163F223,
0xBD77FA49, 0xF44B876E, 0x2F0F0007, 0x66337D20,
0x9C6A7824, 0xD5560503, 0x0E12826A, 0x472EFF4D,
0xFF4CFE93, 0xB67083B4, 0x6D3404DD, 0x240879FA,
0xDE517CFE, 0x976D01D9, 0x4C2986B0, 0x0515FB97,
0x2E015D56, 0x673D2071, 0xBC79A718, 0xF545DA3F,
0x0F1CDF3B, 0x4620A21C, 0x9D642575, 0xD4585852,
0x6C3A598C, 0x250624AB, 0xFE42A3C2, 0xB77EDEE5,
0x4D27DBE1, 0x041BA6C6, 0xDF5F21AF, 0x96635C88,
0xAA7754E2, 0xE34B29C5, 0x380FAEAC, 0x7133D38B,
0x8B6AD68F, 0xC256ABA8, 0x19122CC1, 0x502E51E6,
0xE84C5038, 0xA1702D1F, 0x7A34AA76, 0x3308D751,
0xC951D255, 0x806DAF72, 0x5B29281B, 0x1215553C,
0x230138CF, 0x6A3D45E8, 0xB179C281, 0xF845BFA6,
0x021CBAA2, 0x4B20C785, 0x906440EC, 0xD9583DCB,
0x613A3C15, 0x28064132, 0xF342C65B, 0xBA7EBB7C,
0x4027BE78, 0x091BC35F, 0xD25F4436, 0x9B633911,
0xA777317B, 0xEE4B4C5C, 0x350FCB35, 0x7C33B612,
0x866AB316, 0xCF56CE31, 0x14124958, 0x5D2E347F,
0xE54C35A1, 0xAC704886, 0x7734CFEF, 0x3E08B2C8,
0xC451B7CC, 0x8D6DCAEB, 0x56294D82, 0x1F1530A5
};
}
|
apache/druid | 36,419 | sql/src/main/java/org/apache/druid/sql/calcite/expression/builtin/NestedDataOperatorConversions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.expression.builtin;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlFunction;
import org.apache.calcite.sql.SqlFunctionCategory;
import org.apache.calcite.sql.SqlJsonEmptyOrError;
import org.apache.calcite.sql.SqlJsonValueEmptyOrErrorBehavior;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.sql.type.ReturnTypes;
import org.apache.calcite.sql.type.SqlOperandCountRanges;
import org.apache.calcite.sql.type.SqlReturnTypeInference;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.sql.type.SqlTypeTransforms;
import org.apache.calcite.sql2rel.SqlRexConvertlet;
import org.apache.druid.error.DruidException;
import org.apache.druid.error.InvalidSqlInput;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.math.expr.Expr;
import org.apache.druid.math.expr.InputBindings;
import org.apache.druid.query.expression.NestedDataExpressions;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.RowSignature;
import org.apache.druid.segment.nested.NestedPathFinder;
import org.apache.druid.segment.nested.NestedPathPart;
import org.apache.druid.segment.virtual.NestedFieldVirtualColumn;
import org.apache.druid.sql.calcite.expression.DirectOperatorConversion;
import org.apache.druid.sql.calcite.expression.DruidExpression;
import org.apache.druid.sql.calcite.expression.Expressions;
import org.apache.druid.sql.calcite.expression.OperatorConversions;
import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
import org.apache.druid.sql.calcite.planner.Calcites;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.convertlet.DruidConvertletFactory;
import org.apache.druid.sql.calcite.table.RowSignatures;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.List;
public class NestedDataOperatorConversions
{
public static final DruidJsonValueConvertletFactory DRUID_JSON_VALUE_CONVERTLET_FACTORY_INSTANCE =
new DruidJsonValueConvertletFactory();
public static final SqlReturnTypeInference NESTED_RETURN_TYPE_INFERENCE = opBinding -> RowSignatures.makeComplexType(
opBinding.getTypeFactory(),
ColumnType.NESTED_DATA,
true
);
public static final SqlReturnTypeInference NESTED_ARRAY_RETURN_TYPE_INFERENCE = opBinding ->
opBinding.getTypeFactory().createArrayType(
RowSignatures.makeComplexType(
opBinding.getTypeFactory(),
ColumnType.NESTED_DATA,
true
),
-1
);
/**
 * Conversion for the {@code JSON_PATHS(expr)} SQL function, which maps one-to-one onto the native
 * {@code json_paths} expression. The SQL operator accepts any single operand and is declared to return an
 * ARRAY of nullable VARCHAR elements (the set of paths present in the nested structure).
 */
public static class JsonPathsOperatorConversion implements SqlOperatorConversion
{
  private static final String FUNCTION_NAME = "json_paths";

  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
      .operandTypeChecker(OperandTypes.ANY)
      .functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
      .returnTypeArrayWithNullableElements(SqlTypeName.VARCHAR)
      .build();

  @Override
  public SqlOperator calciteOperator()
  {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      final PlannerContext plannerContext,
      final RowSignature rowSignature,
      final RexNode rexNode
  )
  {
    // Convert all operands via the shared helper and emit a plain function call. The output type is left
    // null (unknown), matching the original behavior for this macro.
    return OperatorConversions.convertCall(
        plannerContext,
        rowSignature,
        rexNode,
        operands -> DruidExpression.ofExpression(null, DruidExpression.functionCall(FUNCTION_NAME), operands)
    );
  }
}
/**
 * Conversion for the {@code JSON_KEYS(expr, path)} SQL function, mapping one-to-one onto the native
 * {@code json_keys} expression. The second operand (the path) must be a literal; the operator is declared
 * to return a nullable ARRAY of nullable VARCHAR elements, and the native output type is STRING_ARRAY.
 */
public static class JsonKeysOperatorConversion implements SqlOperatorConversion
{
  private static final String FUNCTION_NAME = "json_keys";

  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
      .operandNames("expr", "path")
      .operandTypes(SqlTypeFamily.ANY, SqlTypeFamily.STRING)
      .literalOperands(1)
      .requiredOperandCount(2)
      .functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
      .returnTypeNullableArrayWithNullableElements(SqlTypeName.VARCHAR)
      .build();

  @Override
  public SqlOperator calciteOperator()
  {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      final PlannerContext plannerContext,
      final RowSignature rowSignature,
      final RexNode rexNode
  )
  {
    // Straight pass-through: both operands become arguments of the native json_keys call, typed as a
    // string array.
    return OperatorConversions.convertCall(
        plannerContext,
        rowSignature,
        rexNode,
        operands -> DruidExpression.ofExpression(
            ColumnType.STRING_ARRAY,
            DruidExpression.functionCall(FUNCTION_NAME),
            operands
        )
    );
  }
}
/**
 * Conversion for the standard {@code JSON_QUERY} SQL function onto the native {@code json_query} expression.
 *
 * Only the first two operands (input expression and path) are translated; the trailing wrapper/behavior
 * operands that Calcite attaches to JSON_QUERY are ignored. When the input is a direct column reference and
 * the path is a literal, planning produces a {@link NestedFieldVirtualColumn} (with {@code processFromRaw}
 * set to true) so the query can read the nested field directly; otherwise it falls back to a pure
 * expression.
 */
public static class JsonQueryOperatorConversion implements SqlOperatorConversion
{
  private static final String FUNCTION_NAME = "json_query";
  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
      .operandTypeChecker(
          OperandTypes.family(
              SqlTypeFamily.ANY,
              SqlTypeFamily.CHARACTER,
              SqlTypeFamily.ANY,
              SqlTypeFamily.ANY,
              SqlTypeFamily.ANY
          )
      )
      .returnTypeInference(NESTED_RETURN_TYPE_INFERENCE)
      .functionCategory(SqlFunctionCategory.SYSTEM)
      .build();

  @Override
  public SqlOperator calciteOperator()
  {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      PlannerContext plannerContext,
      RowSignature rowSignature,
      RexNode rexNode
  )
  {
    final RexCall call = (RexCall) rexNode;
    // only the input expression and the path are relevant; drop the trailing behavior operands
    final List<DruidExpression> druidExpressions = Expressions.toDruidExpressions(
        plannerContext,
        rowSignature,
        call.getOperands().subList(0, 2)
    );

    if (druidExpressions == null || druidExpressions.size() != 2) {
      return null;
    }

    final Expr pathExpr = plannerContext.parseExpression(druidExpressions.get(1).getExpression());
    if (!pathExpr.isLiteral()) {
      // if path argument is not constant, just use a pure expression
      return DruidExpression.ofFunctionCall(ColumnType.NESTED_DATA, FUNCTION_NAME, druidExpressions);
    }
    // pre-normalize path so that the same expressions with different json path syntax are collapsed
    final String path = (String) pathExpr.eval(InputBindings.nilBindings()).value();
    // extractNestedPathParts is defined elsewhere in this class; presumably it parses the path and raises a
    // SQL error on malformed input, like the inline handling in the array variant — TODO confirm
    final List<NestedPathPart> parts = extractNestedPathParts(call, path);
    final String jsonPath = NestedPathFinder.toNormalizedJsonPath(parts);
    final DruidExpression.ExpressionGenerator builder = args -> StringUtils.format(
        "json_query(%s,%s)",
        args.get(0).getExpression(),
        DruidExpression.stringLiteral(jsonPath)
    );

    if (druidExpressions.get(0).isSimpleExtraction()) {
      // direct column input: plan as a NestedFieldVirtualColumn so the field can be read natively;
      // processFromRaw=true because json_query must return nested data, not a scalar
      return DruidExpression.ofVirtualColumn(
          ColumnType.NESTED_DATA,
          builder,
          ImmutableList.of(
              DruidExpression.ofColumn(ColumnType.NESTED_DATA, druidExpressions.get(0).getDirectColumn())
          ),
          (name, outputType, expression, macroTable) -> new NestedFieldVirtualColumn(
              druidExpressions.get(0).getDirectColumn(),
              name,
              outputType,
              parts,
              true,
              null,
              null
          )
      );
    }
    return DruidExpression.ofExpression(ColumnType.NESTED_DATA, builder, druidExpressions);
  }
}
/**
 * Conversion for {@code JSON_QUERY_ARRAY(expr, path)}, a direct mapping onto the native
 * {@link NestedDataExpressions.JsonQueryArrayExprMacro} expression. Declared to return a forced-nullable
 * ARRAY of nested-data elements.
 */
public static class JsonQueryArrayOperatorConversion extends DirectOperatorConversion
{
  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder(StringUtils.toUpperCase(NestedDataExpressions.JsonQueryArrayExprMacro.NAME))
      .operandTypeChecker(
          OperandTypes.family(
              SqlTypeFamily.ANY,
              SqlTypeFamily.CHARACTER
          )
      )
      .returnTypeInference(NESTED_ARRAY_RETURN_TYPE_INFERENCE.andThen(SqlTypeTransforms.FORCE_NULLABLE))
      .functionCategory(SqlFunctionCategory.SYSTEM)
      .build();

  public JsonQueryArrayOperatorConversion()
  {
    super(SQL_FUNCTION, NestedDataExpressions.JsonQueryArrayExprMacro.NAME);
  }
}
/**
* The {@link org.apache.calcite.sql2rel.StandardConvertletTable} converts json_value(.. RETURNING type) into
* cast(json_value_any(..), type).
*
* This is not that useful for us, so we have our own convertlet, to translate into specialized operators such
* as {@link JsonValueBigintOperatorConversion}, {@link JsonValueDoubleOperatorConversion}, or
* {@link JsonValueVarcharOperatorConversion}, before falling back to {@link JsonValueAnyOperatorConversion}.
*
* This convertlet still always wraps the function in a {@link SqlStdOperatorTable#CAST}, to smooth out type
* mismatches, such as VARCHAR(2000) vs VARCHAR or whatever else various type checkers like to complain about not
* exactly matching.
*/
public static class DruidJsonValueConvertletFactory implements DruidConvertletFactory
{
  @Override
  public SqlRexConvertlet createConvertlet(PlannerContext plannerContext)
  {
    return (cx, call) -> {
      // We don't support modifying the behavior to be anything other than 'NULL ON EMPTY' / 'NULL ON ERROR'.
      // Check this here: prior operand before ON EMPTY or ON ERROR must be NULL.
      for (int i = 2; i < call.operandCount(); i++) {
        final SqlNode operand = call.operand(i);
        if (operand.getKind() == SqlKind.LITERAL
            && ((SqlLiteral) operand).getValue() instanceof SqlJsonEmptyOrError) {
          // Found ON EMPTY or ON ERROR. Check prior operand.
          final SqlNode priorOperand = call.operand(i - 1);
          Preconditions.checkArgument(
              priorOperand.getKind() == SqlKind.LITERAL
              && ((SqlLiteral) priorOperand).getValue() == SqlJsonValueEmptyOrErrorBehavior.NULL,
              "Unsupported JSON_VALUE parameter '%s' defined - please re-issue this query without this argument",
              ((SqlLiteral) operand).getValue()
          );
        }
      }

      // Pick the specialized JSON_VALUE_* operator matching the validated RETURNING type.
      RelDataType sqlType = cx.getValidator().getValidatedNodeType(call);
      SqlOperator jsonValueOperator;
      if (SqlTypeName.INT_TYPES.contains(sqlType.getSqlTypeName())) {
        jsonValueOperator = JsonValueBigintOperatorConversion.FUNCTION;
      } else if (SqlTypeName.DECIMAL.equals(sqlType.getSqlTypeName()) ||
                 SqlTypeName.APPROX_TYPES.contains(sqlType.getSqlTypeName())) {
        jsonValueOperator = JsonValueDoubleOperatorConversion.FUNCTION;
      } else if (SqlTypeName.STRING_TYPES.contains(sqlType.getSqlTypeName())) {
        jsonValueOperator = JsonValueVarcharOperatorConversion.FUNCTION;
      } else if (SqlTypeName.ARRAY.equals(sqlType.getSqlTypeName())) {
        // RETURNING <type> ARRAY: dispatch on the array element type
        ColumnType elementType = Calcites.getColumnTypeForRelDataType(sqlType.getComponentType());
        switch (elementType.getType()) {
          case LONG:
            jsonValueOperator = JsonValueReturningArrayBigIntOperatorConversion.FUNCTION;
            break;
          case DOUBLE:
            jsonValueOperator = JsonValueReturningArrayDoubleOperatorConversion.FUNCTION;
            break;
          case STRING:
            jsonValueOperator = JsonValueReturningArrayVarcharOperatorConversion.FUNCTION;
            break;
          default:
            throw new IAE("Unhandled JSON_VALUE RETURNING ARRAY type [%s]", sqlType.getComponentType());
        }
      } else {
        // fallback to json_value_any, e.g. the 'standard' convertlet.
        jsonValueOperator = JsonValueAnyOperatorConversion.FUNCTION;
      }

      // always cast anyway, to prevent haters from complaining that VARCHAR doesn't match VARCHAR(2000)
      return cx.getRexBuilder().makeCast(
          sqlType,
          cx.getRexBuilder().makeCall(
              jsonValueOperator,
              cx.convertExpression(call.operand(0)),
              cx.convertExpression(call.operand(1))
          )
      );
    };
  }

  @Override
  public List<SqlOperator> operators()
  {
    // This convertlet replaces the standard handling of JSON_VALUE.
    return Collections.singletonList(SqlStdOperatorTable.JSON_VALUE);
  }
}
/**
 * Base conversion for the typed {@code JSON_VALUE_*} operators (BIGINT, DOUBLE, VARCHAR) produced by
 * {@link DruidJsonValueConvertletFactory}. Translates to the native 3-argument
 * {@code json_value(expr, path, type)} expression; when the input is a direct column and the path is a
 * literal, plans a {@link NestedFieldVirtualColumn} instead (with {@code processFromRaw} set to false,
 * since JSON_VALUE extracts a scalar).
 */
public abstract static class JsonValueReturningTypeOperatorConversion implements SqlOperatorConversion
{
  private final SqlFunction function;
  // native output type corresponding to the SQL RETURNING type
  private final ColumnType druidType;

  public JsonValueReturningTypeOperatorConversion(SqlFunction function, ColumnType druidType)
  {
    this.druidType = druidType;
    this.function = function;
  }

  @Override
  public SqlOperator calciteOperator()
  {
    return function;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      PlannerContext plannerContext,
      RowSignature rowSignature,
      RexNode rexNode
  )
  {
    final RexCall call = (RexCall) rexNode;
    final List<DruidExpression> druidExpressions = Expressions.toDruidExpressions(
        plannerContext,
        rowSignature,
        call.getOperands()
    );

    if (druidExpressions == null || druidExpressions.size() != 2) {
      return null;
    }

    final Expr pathExpr = plannerContext.parseExpression(druidExpressions.get(1).getExpression());
    if (!pathExpr.isLiteral()) {
      // if path argument is not constant, just use a pure expression
      return DruidExpression.ofFunctionCall(
          druidType,
          "json_value",
          ImmutableList.<DruidExpression>builder()
                       .addAll(druidExpressions)
                       .add(DruidExpression.ofStringLiteral(druidType.asTypeString()))
                       .build()
      );
    }
    // pre-normalize path so that the same expressions with different json path syntax are collapsed
    final String path = (String) pathExpr.eval(InputBindings.nilBindings()).value();
    // extractNestedPathParts is defined elsewhere in this class; presumably it parses the path and raises a
    // SQL error on malformed input, like the inline handling in the array variant — TODO confirm
    final List<NestedPathPart> parts = extractNestedPathParts(call, path);
    final String jsonPath = NestedPathFinder.toNormalizedJsonPath(parts);
    final DruidExpression.ExpressionGenerator builder = args -> StringUtils.format(
        "json_value(%s,%s, %s)",
        args.get(0).getExpression(),
        DruidExpression.stringLiteral(jsonPath),
        DruidExpression.stringLiteral(druidType.asTypeString())
    );
    if (druidExpressions.get(0).isSimpleExtraction()) {
      // direct column input: plan as a NestedFieldVirtualColumn reading the scalar field
      return DruidExpression.ofVirtualColumn(
          druidType,
          builder,
          ImmutableList.of(
              DruidExpression.ofColumn(ColumnType.NESTED_DATA, druidExpressions.get(0).getDirectColumn())
          ),
          (name, outputType, expression, macroTable) -> new NestedFieldVirtualColumn(
              druidExpressions.get(0).getDirectColumn(),
              name,
              outputType,
              parts,
              false,
              null,
              null
          )
      );
    }
    return DruidExpression.ofExpression(druidType, builder, druidExpressions);
  }

  /**
   * Builds a {@code functionName(expr, path)} SQL function whose return type is the given type, forced
   * nullable (JSON_VALUE yields NULL on missing paths).
   */
  static SqlFunction buildFunction(String functionName, SqlTypeName typeName)
  {
    return OperatorConversions.operatorBuilder(functionName)
                              .operandTypeChecker(
                                  OperandTypes.sequence(
                                      "'" + functionName + "(expr, path)'",
                                      OperandTypes.family(SqlTypeFamily.ANY),
                                      OperandTypes.family(SqlTypeFamily.STRING)
                                  )
                              )
                              .returnTypeInference(
                                  ReturnTypes.cascade(
                                      opBinding -> opBinding.getTypeFactory().createSqlType(typeName),
                                      SqlTypeTransforms.FORCE_NULLABLE
                                  )
                              )
                              .functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
                              .build();
  }
}
/**
 * {@code JSON_VALUE_BIGINT}: JSON_VALUE with {@code RETURNING BIGINT}, selected by
 * {@link DruidJsonValueConvertletFactory}; native output type is LONG.
 */
public static class JsonValueBigintOperatorConversion extends JsonValueReturningTypeOperatorConversion
{
  private static final SqlFunction FUNCTION = buildFunction("JSON_VALUE_BIGINT", SqlTypeName.BIGINT);

  public JsonValueBigintOperatorConversion()
  {
    super(FUNCTION, ColumnType.LONG);
  }
}
/**
 * {@code JSON_VALUE_DOUBLE}: JSON_VALUE with {@code RETURNING DOUBLE} (or DECIMAL/approximate types),
 * selected by {@link DruidJsonValueConvertletFactory}; native output type is DOUBLE.
 */
public static class JsonValueDoubleOperatorConversion extends JsonValueReturningTypeOperatorConversion
{
  private static final SqlFunction FUNCTION = buildFunction("JSON_VALUE_DOUBLE", SqlTypeName.DOUBLE);

  public JsonValueDoubleOperatorConversion()
  {
    super(FUNCTION, ColumnType.DOUBLE);
  }
}
/**
 * {@code JSON_VALUE_VARCHAR}: JSON_VALUE with a string {@code RETURNING} type, selected by
 * {@link DruidJsonValueConvertletFactory}; native output type is STRING.
 */
public static class JsonValueVarcharOperatorConversion extends JsonValueReturningTypeOperatorConversion
{
  private static final SqlFunction FUNCTION = buildFunction("JSON_VALUE_VARCHAR", SqlTypeName.VARCHAR);

  public JsonValueVarcharOperatorConversion()
  {
    super(FUNCTION, ColumnType.STRING);
  }
}
/**
 * Base conversion for the array-returning {@code JSON_VALUE_ARRAY_*} operators produced by
 * {@link DruidJsonValueConvertletFactory} for {@code JSON_VALUE(.. RETURNING type ARRAY)}. Mirrors
 * {@link JsonValueReturningTypeOperatorConversion} but with array output types; unlike the scalar variant,
 * a non-literal path returns null (no pure-expression fallback) and the path is parsed inline, rejecting
 * malformed paths with a SQL error.
 */
public abstract static class JsonValueReturningArrayTypeOperatorConversion implements SqlOperatorConversion
{
  private final SqlFunction function;
  // native array output type corresponding to the SQL RETURNING <element> ARRAY type
  private final ColumnType druidType;

  public JsonValueReturningArrayTypeOperatorConversion(SqlFunction function, ColumnType druidType)
  {
    this.druidType = druidType;
    this.function = function;
  }

  @Override
  public SqlOperator calciteOperator()
  {
    return function;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      PlannerContext plannerContext,
      RowSignature rowSignature,
      RexNode rexNode
  )
  {
    final RexCall call = (RexCall) rexNode;
    final List<DruidExpression> druidExpressions = Expressions.toDruidExpressions(
        plannerContext,
        rowSignature,
        call.getOperands()
    );

    if (druidExpressions == null || druidExpressions.size() != 2) {
      return null;
    }

    final Expr pathExpr = plannerContext.parseExpression(druidExpressions.get(1).getExpression());
    if (!pathExpr.isLiteral()) {
      // non-literal path is not supported for the array variant; give up on this conversion
      return null;
    }
    // pre-normalize path so that the same expressions with different json path syntax are collapsed
    final String path = (String) pathExpr.eval(InputBindings.nilBindings()).value();
    final List<NestedPathPart> parts;
    try {
      parts = NestedPathFinder.parseJsonPath(path);
    }
    catch (IllegalArgumentException iae) {
      // surface malformed paths as a SQL input error rather than a planning failure
      throw InvalidSqlInput.exception(
          "Cannot use [%s]: [%s]",
          call.getOperator().getName(),
          iae.getMessage()
      );
    }
    final String jsonPath = NestedPathFinder.toNormalizedJsonPath(parts);
    final DruidExpression.ExpressionGenerator builder = args -> StringUtils.format(
        "json_value(%s,%s, %s)",
        args.get(0).getExpression(),
        DruidExpression.stringLiteral(jsonPath),
        DruidExpression.stringLiteral(druidType.asTypeString())
    );
    if (druidExpressions.get(0).isSimpleExtraction()) {
      // direct column input: plan as a NestedFieldVirtualColumn (processFromRaw=false, scalar-array read)
      return DruidExpression.ofVirtualColumn(
          druidType,
          builder,
          ImmutableList.of(
              DruidExpression.ofColumn(ColumnType.NESTED_DATA, druidExpressions.get(0).getDirectColumn())
          ),
          (name, outputType, expression, macroTable) -> new NestedFieldVirtualColumn(
              druidExpressions.get(0).getDirectColumn(),
              name,
              outputType,
              parts,
              false,
              null,
              null
          )
      );
    }
    return DruidExpression.ofExpression(druidType, builder, druidExpressions);
  }

  /**
   * Builds a {@code functionName(expr, path)} SQL function returning a nullable ARRAY of non-null elements
   * of the given element type.
   */
  static SqlFunction buildArrayFunction(String functionName, SqlTypeName elementTypeName)
  {
    return OperatorConversions.operatorBuilder(functionName)
                              .operandTypeChecker(
                                  OperandTypes.sequence(
                                      "'" + functionName + "(expr, path)'",
                                      OperandTypes.family(SqlTypeFamily.ANY),
                                      OperandTypes.family(SqlTypeFamily.STRING)
                                  )
                              )
                              .returnTypeInference(
                                  opBinding -> {
                                    return opBinding.getTypeFactory().createTypeWithNullability(Calcites.createSqlArrayTypeWithNullability(opBinding.getTypeFactory(), elementTypeName, false), true);
                                  }
                              )
                              .functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
                              .build();
  }
}
/**
 * {@code JSON_VALUE_ARRAY_BIGINT}: JSON_VALUE with {@code RETURNING BIGINT ARRAY}, selected by
 * {@link DruidJsonValueConvertletFactory}; native output type is LONG_ARRAY.
 */
public static class JsonValueReturningArrayBigIntOperatorConversion extends JsonValueReturningArrayTypeOperatorConversion
{
  static final SqlFunction FUNCTION = buildArrayFunction("JSON_VALUE_ARRAY_BIGINT", SqlTypeName.BIGINT);

  public JsonValueReturningArrayBigIntOperatorConversion()
  {
    super(FUNCTION, ColumnType.LONG_ARRAY);
  }
}
/**
 * {@code JSON_VALUE_ARRAY_DOUBLE}: JSON_VALUE with {@code RETURNING DOUBLE ARRAY}, selected by
 * {@link DruidJsonValueConvertletFactory}; native output type is DOUBLE_ARRAY.
 */
public static class JsonValueReturningArrayDoubleOperatorConversion extends JsonValueReturningArrayTypeOperatorConversion
{
  static final SqlFunction FUNCTION = buildArrayFunction("JSON_VALUE_ARRAY_DOUBLE", SqlTypeName.DOUBLE);

  public JsonValueReturningArrayDoubleOperatorConversion()
  {
    super(FUNCTION, ColumnType.DOUBLE_ARRAY);
  }
}
/**
 * {@code JSON_VALUE_ARRAY_VARCHAR}: JSON_VALUE with {@code RETURNING VARCHAR ARRAY}, selected by
 * {@link DruidJsonValueConvertletFactory}; native output type is STRING_ARRAY.
 */
public static class JsonValueReturningArrayVarcharOperatorConversion extends JsonValueReturningArrayTypeOperatorConversion
{
  static final SqlFunction FUNCTION = buildArrayFunction("JSON_VALUE_ARRAY_VARCHAR", SqlTypeName.VARCHAR);

  public JsonValueReturningArrayVarcharOperatorConversion()
  {
    super(FUNCTION, ColumnType.STRING_ARRAY);
  }
}
/**
 * Fallback conversion {@code JSON_VALUE_ANY}, used by {@link DruidJsonValueConvertletFactory} when the
 * validated RETURNING type does not match any specialized operator. The SQL return type is a nullable
 * VARCHAR (the closest available stand-in for ANY), and the native translation is the 2-argument
 * {@code json_value(expr, path)} with a STRING output type.
 */
public static class JsonValueAnyOperatorConversion implements SqlOperatorConversion
{
  private static final SqlFunction FUNCTION =
      OperatorConversions.operatorBuilder("JSON_VALUE_ANY")
                         .operandTypeChecker(
                             OperandTypes.or(
                                 // plain 2-argument form
                                 OperandTypes.sequence(
                                     "'JSON_VALUE_ANY(expr, path)'",
                                     OperandTypes.family(SqlTypeFamily.ANY),
                                     OperandTypes.family(SqlTypeFamily.STRING)
                                 ),
                                 // form with the extra behavior operands Calcite attaches to JSON_VALUE
                                 OperandTypes.family(
                                     SqlTypeFamily.ANY,
                                     SqlTypeFamily.CHARACTER,
                                     SqlTypeFamily.ANY,
                                     SqlTypeFamily.ANY,
                                     SqlTypeFamily.ANY,
                                     SqlTypeFamily.ANY,
                                     SqlTypeFamily.ANY
                                 )
                             )
                         )
                         .operandTypeInference((callBinding, returnType, operandTypes) -> {
                           // operands 3 and 5 are the ON EMPTY / ON ERROR default values; force them to
                           // ANY so validation does not reject them
                           RelDataTypeFactory typeFactory = callBinding.getTypeFactory();
                           if (operandTypes.length > 5) {
                             operandTypes[3] = typeFactory.createSqlType(SqlTypeName.ANY);
                             operandTypes[5] = typeFactory.createSqlType(SqlTypeName.ANY);
                           }
                         })
                         .returnTypeInference(
                             ReturnTypes.cascade(
                                 opBinding -> opBinding.getTypeFactory().createTypeWithNullability(
                                     // STRING is the closest thing we have to an ANY type
                                     // however, this should really be using SqlTypeName.ANY.. someday
                                     opBinding.getTypeFactory().createSqlType(SqlTypeName.VARCHAR),
                                     true
                                 ),
                                 SqlTypeTransforms.FORCE_NULLABLE
                             )
                         )
                         .functionCategory(SqlFunctionCategory.SYSTEM)
                         .build();

  @Override
  public SqlOperator calciteOperator()
  {
    return FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      PlannerContext plannerContext,
      RowSignature rowSignature,
      RexNode rexNode
  )
  {
    final RexCall call = (RexCall) rexNode;

    // calcite parser can allow for a bunch of junk in here that we don't care about right now, so the call looks
    // something like this:
    // JSON_VALUE_ANY(`nested`.`nest`, '$.x', SQLJSONVALUEEMPTYORERRORBEHAVIOR[NULL], NULL, SQLJSONVALUEEMPTYORERRORBEHAVIOR[NULL], NULL)
    // by the time it gets here
    final List<DruidExpression> druidExpressions = Expressions.toDruidExpressions(
        plannerContext,
        rowSignature,
        call.getOperands().size() > 2 ? call.getOperands().subList(0, 2) : call.getOperands()
    );

    if (druidExpressions == null || druidExpressions.size() != 2) {
      return null;
    }

    final Expr pathExpr = plannerContext.parseExpression(druidExpressions.get(1).getExpression());
    if (!pathExpr.isLiteral()) {
      // non-literal path is not supported here; give up on this conversion
      return null;
    }
    // pre-normalize path so that the same expressions with different json path syntax are collapsed
    final String path = (String) pathExpr.eval(InputBindings.nilBindings()).value();
    // extractNestedPathParts is defined elsewhere in this class; presumably it parses the path and raises a
    // SQL error on malformed input, like the inline handling in the array variant — TODO confirm
    final List<NestedPathPart> parts = extractNestedPathParts(call, path);
    final String jsonPath = NestedPathFinder.toNormalizedJsonPath(parts);
    final DruidExpression.ExpressionGenerator builder = args -> StringUtils.format(
        "json_value(%s,%s)",
        args.get(0).getExpression(),
        DruidExpression.stringLiteral(jsonPath)
    );

    // STRING is the closest thing we have to ANY, though maybe someday this
    // can be replaced with a VARIANT type
    final ColumnType columnType = ColumnType.STRING;

    if (druidExpressions.get(0).isSimpleExtraction()) {
      // direct column input: plan as a NestedFieldVirtualColumn; note the output type passed to the
      // virtual column is null (unspecified), unlike the typed variants
      return DruidExpression.ofVirtualColumn(
          columnType,
          builder,
          ImmutableList.of(
              DruidExpression.ofColumn(ColumnType.NESTED_DATA, druidExpressions.get(0).getDirectColumn())
          ),
          (name, outputType, expression, macroTable) -> new NestedFieldVirtualColumn(
              druidExpressions.get(0).getDirectColumn(),
              name,
              null,
              parts,
              false,
              null,
              null
          )
      );
    }
    return DruidExpression.ofExpression(columnType, builder, druidExpressions);
  }
}
/**
 * Conversion for JSON_OBJECT(KEY k VALUE v, ...): builds a nested-data object from alternating
 * key/value operands, planning to the native {@code json_object} expression.
 */
public static class JsonObjectOperatorConversion implements SqlOperatorConversion
{
  private static final String FUNCTION_NAME = "json_object";
  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
      .operandTypeChecker(OperandTypes.variadic(SqlOperandCountRanges.from(1)))
      .operandTypeInference((callBinding, returnType, operandTypes) -> {
        final RelDataTypeFactory factory = callBinding.getTypeFactory();
        for (int i = 0; i < operandTypes.length; i++) {
          if (i % 2 == 0) {
            // even positions are object keys, which must be strings
            operandTypes[i] = factory.createSqlType(SqlTypeName.VARCHAR);
          } else {
            // odd positions are object values, which may be of any (nullable) type
            operandTypes[i] = factory.createTypeWithNullability(
                factory.createSqlType(SqlTypeName.ANY),
                true
            );
          }
        }
      })
      .returnTypeInference(NESTED_RETURN_TYPE_INFERENCE)
      .functionCategory(SqlFunctionCategory.SYSTEM)
      .build();

  @Override
  public SqlOperator calciteOperator()
  {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(PlannerContext plannerContext, RowSignature rowSignature, RexNode rexNode)
  {
    final RexCall call = (RexCall) rexNode;

    // we ignore the first argument because calcite sets a 'nullBehavior' parameter by the time it gets here
    // that we .. dont care about right now
    final List<RexNode> keyValueOperands = call.getOperands().subList(1, call.getOperands().size());
    final List<DruidExpression> convertedArgs = Expressions.toDruidExpressions(
        plannerContext,
        rowSignature,
        keyValueOperands
    );
    if (convertedArgs == null) {
      return null;
    }

    return DruidExpression.ofExpression(
        ColumnType.NESTED_DATA,
        null,
        DruidExpression.functionCall(FUNCTION_NAME),
        convertedArgs
    );
  }
}
/**
 * Conversion for JSON_MERGE(expr, ...): merges one or more nested-data expressions, planning to
 * the native {@code json_merge} expression.
 */
public static class JsonMergeOperatorConversion implements SqlOperatorConversion
{
  private static final String FUNCTION_NAME = "json_merge";

  // NOTE(review): sibling conversions register their SQL name upper-cased via
  // StringUtils.toUpperCase(FUNCTION_NAME); this one registers it as-is. Left unchanged because the
  // registered name is user-visible SQL surface — confirm whether this is intentional.
  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder(FUNCTION_NAME)
      .operandTypeChecker(OperandTypes.variadic(SqlOperandCountRanges.from(1)))
      .operandTypeInference((callBinding, returnType, operandTypes) -> {
        RelDataTypeFactory typeFactory = callBinding.getTypeFactory();
        // every operand may be of any (nullable) type
        for (int i = 0; i < operandTypes.length; i++) {
          operandTypes[i] = typeFactory.createTypeWithNullability(
              typeFactory.createSqlType(SqlTypeName.ANY),
              true
          );
        }
      })
      .returnTypeInference(NESTED_RETURN_TYPE_INFERENCE)
      .functionCategory(SqlFunctionCategory.SYSTEM)
      .build();

  @Override
  public SqlOperator calciteOperator()
  {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      PlannerContext plannerContext,
      RowSignature rowSignature,
      RexNode rexNode
  )
  {
    return OperatorConversions.convertCall(
        plannerContext,
        rowSignature,
        rexNode,
        druidExpressions -> DruidExpression.ofExpression(
            ColumnType.NESTED_DATA,
            // use the shared constant instead of repeating the literal "json_merge", consistent
            // with the sibling conversions (keeps the native function name in one place)
            DruidExpression.functionCall(FUNCTION_NAME),
            druidExpressions
        )
    );
  }
}
public static class ToJsonStringOperatorConversion implements SqlOperatorConversion
{
private static final String FUNCTION_NAME = "to_json_string";
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
.operandTypes(SqlTypeFamily.ANY)
.returnTypeCascadeNullable(SqlTypeName.VARCHAR)
.functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
.build();
@Override
public SqlOperator calciteOperator()
{
return SQL_FUNCTION;
}
@Nullable
@Override
public DruidExpression toDruidExpression(
PlannerContext plannerContext,
RowSignature rowSignature,
RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.ofExpression(
ColumnType.NESTED_DATA,
DruidExpression.functionCall(FUNCTION_NAME),
druidExpressions
)
);
}
}
/**
 * Conversion for PARSE_JSON(str): parses a JSON string into nested data via the native
 * {@code parse_json} expression.
 */
public static class ParseJsonOperatorConversion implements SqlOperatorConversion
{
  private static final String FUNCTION_NAME = "parse_json";
  private static final SqlFunction SQL_FUNCTION = OperatorConversions
      .operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
      .operandTypes(SqlTypeFamily.STRING)
      .returnTypeInference(NESTED_RETURN_TYPE_INFERENCE)
      .functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
      .build();

  @Override
  public SqlOperator calciteOperator()
  {
    return SQL_FUNCTION;
  }

  @Nullable
  @Override
  public DruidExpression toDruidExpression(
      PlannerContext plannerContext,
      RowSignature rowSignature,
      RexNode rexNode
  )
  {
    // standard one-to-one mapping onto the native function of the same name
    return OperatorConversions.convertCall(
        plannerContext,
        rowSignature,
        rexNode,
        args -> {
          return DruidExpression.ofExpression(
              ColumnType.NESTED_DATA,
              DruidExpression.functionCall(FUNCTION_NAME),
              args
          );
        }
    );
  }
}
public static class TryParseJsonOperatorConversion implements SqlOperatorConversion
{
private static final String FUNCTION_NAME = "try_parse_json";
private static final SqlFunction SQL_FUNCTION = OperatorConversions
.operatorBuilder(StringUtils.toUpperCase(FUNCTION_NAME))
.operandTypes(SqlTypeFamily.STRING)
.returnTypeInference(NESTED_RETURN_TYPE_INFERENCE)
.functionCategory(SqlFunctionCategory.USER_DEFINED_FUNCTION)
.build();
@Override
public SqlOperator calciteOperator()
{
return SQL_FUNCTION;
}
@Nullable
@Override
public DruidExpression toDruidExpression(
PlannerContext plannerContext,
RowSignature rowSignature,
RexNode rexNode
)
{
return OperatorConversions.convertCall(
plannerContext,
rowSignature,
rexNode,
druidExpressions -> DruidExpression.ofExpression(
ColumnType.NESTED_DATA,
DruidExpression.functionCall(FUNCTION_NAME),
druidExpressions
)
);
}
}
/**
 * Parses a JSON path string into nested path parts, translating parse failures into a
 * user-facing {@link DruidException} that names the offending operator.
 */
@Nonnull
private static List<NestedPathPart> extractNestedPathParts(RexCall call, String path)
{
  try {
    return NestedPathFinder.parseJsonPath(path);
  }
  catch (IllegalArgumentException e) {
    // surface the bad path and the SQL operator it was passed to
    throw DruidException
        .forPersona(DruidException.Persona.USER)
        .ofCategory(DruidException.Category.INVALID_INPUT)
        .build(e, "Error when processing path [%s], operator [%s] is not useable", path, call.getOperator().getName());
  }
}
}
|
apache/plc4x | 36,507 | plc4j/drivers/profinet/src/main/generated/org/apache/plc4x/java/profinet/readwrite/DataItem.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.plc4x.java.profinet.readwrite;
import static org.apache.plc4x.java.spi.codegen.fields.FieldReaderFactory.*;
import static org.apache.plc4x.java.spi.codegen.fields.FieldWriterFactory.*;
import static org.apache.plc4x.java.spi.codegen.io.DataReaderFactory.*;
import static org.apache.plc4x.java.spi.codegen.io.DataWriterFactory.*;
import static org.apache.plc4x.java.spi.generation.StaticHelper.*;
import java.math.BigInteger;
import java.time.*;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.plc4x.java.api.exceptions.*;
import org.apache.plc4x.java.api.value.*;
import org.apache.plc4x.java.spi.codegen.*;
import org.apache.plc4x.java.spi.codegen.fields.*;
import org.apache.plc4x.java.spi.codegen.io.*;
import org.apache.plc4x.java.spi.generation.*;
import org.apache.plc4x.java.spi.values.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Code generated by code-generation. DO NOT EDIT.
public class DataItem {
// Class logger; not referenced in the code paths visible here.
private static final Logger LOGGER = LoggerFactory.getLogger(DataItem.class);
/**
 * Parses a PROFINET data item from {@code readBuffer} into a {@link PlcValue}, dispatching on
 * {@code dataType} and {@code numberOfValues}.
 *
 * <p>Generated dispatch (code-generation, DO NOT EDIT): for most types, {@code numberOfValues == 1}
 * yields a scalar PlcValue and any other count yields a {@link PlcList}. Returns {@code null} when
 * no branch matches the given data type.
 *
 * @param readBuffer source buffer to read from
 * @param dataType PROFINET data type selecting the branch
 * @param numberOfValues element count (1 = scalar branch, otherwise list branch)
 * @throws ParseException if reading from the buffer fails
 */
public static PlcValue staticParse(
    ReadBuffer readBuffer, ProfinetDataType dataType, Integer numberOfValues)
    throws ParseException {
  if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BOOL
    boolean value = readSimpleField("value", readBoolean(readBuffer));
    return new PlcBOOL(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)) { // List
    List<Boolean> _value = readCountArrayField("value", readBoolean(readBuffer), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (boolean _item : _value) {
      value.add(new PlcBOOL(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
    short value = readSimpleField("value", readUnsignedShort(readBuffer, 8));
    return new PlcUSINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)) { // List
    List<Short> _value =
        readCountArrayField("value", readUnsignedShort(readBuffer, 8), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (short _item : _value) {
      value.add(new PlcUSINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BYTE
    short value = readSimpleField("value", readUnsignedShort(readBuffer, 8));
    return new PlcBYTE(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)) { // List
    // BYTE lists are expanded to individual bits: numberOfValues * 8 booleans
    List<Boolean> _value =
        readCountArrayField("value", readBoolean(readBuffer), (numberOfValues) * (8));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (boolean _item : _value) {
      value.add(new PlcBOOL(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WORD)) { // WORD
    // note: WORD/DWORD/LWORD have no list branch; numberOfValues is not consulted
    int value = readSimpleField("value", readUnsignedInt(readBuffer, 16));
    return new PlcWORD(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DWORD)) { // DWORD
    long value = readSimpleField("value", readUnsignedLong(readBuffer, 32));
    return new PlcDWORD(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LWORD)) { // LWORD
    BigInteger value = readSimpleField("value", readUnsignedBigInteger(readBuffer, 64));
    return new PlcLWORD(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // SINT
    byte value = readSimpleField("value", readSignedByte(readBuffer, 8));
    return new PlcSINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)) { // List
    List<Byte> _value =
        readCountArrayField("value", readSignedByte(readBuffer, 8), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (byte _item : _value) {
      value.add(new PlcSINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // INT
    short value = readSimpleField("value", readSignedShort(readBuffer, 16));
    return new PlcINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)) { // List
    List<Short> _value =
        readCountArrayField("value", readSignedShort(readBuffer, 16), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (short _item : _value) {
      value.add(new PlcINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // DINT
    int value = readSimpleField("value", readSignedInt(readBuffer, 32));
    return new PlcDINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)) { // List
    List<Integer> _value =
        readCountArrayField("value", readSignedInt(readBuffer, 32), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (int _item : _value) {
      value.add(new PlcDINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LINT
    long value = readSimpleField("value", readSignedLong(readBuffer, 64));
    return new PlcLINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)) { // List
    List<Long> _value =
        readCountArrayField("value", readSignedLong(readBuffer, 64), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (long _item : _value) {
      value.add(new PlcLINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
    short value = readSimpleField("value", readUnsignedShort(readBuffer, 8));
    return new PlcUSINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)) { // List
    List<Short> _value =
        readCountArrayField("value", readUnsignedShort(readBuffer, 8), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (short _item : _value) {
      value.add(new PlcUSINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UINT
    int value = readSimpleField("value", readUnsignedInt(readBuffer, 16));
    return new PlcUINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)) { // List
    List<Integer> _value =
        readCountArrayField("value", readUnsignedInt(readBuffer, 16), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (int _item : _value) {
      value.add(new PlcUINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UDINT
    long value = readSimpleField("value", readUnsignedLong(readBuffer, 32));
    return new PlcUDINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)) { // List
    List<Long> _value =
        readCountArrayField("value", readUnsignedLong(readBuffer, 32), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (long _item : _value) {
      value.add(new PlcUDINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // ULINT
    BigInteger value = readSimpleField("value", readUnsignedBigInteger(readBuffer, 64));
    return new PlcULINT(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)) { // List
    List<BigInteger> _value =
        readCountArrayField("value", readUnsignedBigInteger(readBuffer, 64), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (BigInteger _item : _value) {
      value.add(new PlcULINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // REAL
    float value = readSimpleField("value", readFloat(readBuffer, 32));
    return new PlcREAL(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)) { // List
    List<Float> _value = readCountArrayField("value", readFloat(readBuffer, 32), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (float _item : _value) {
      value.add(new PlcREAL(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LREAL
    double value = readSimpleField("value", readDouble(readBuffer, 64));
    return new PlcLREAL(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)) { // List
    List<Double> _value =
        readCountArrayField("value", readDouble(readBuffer, 64), numberOfValues);
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (double _item : _value) {
      value.add(new PlcLREAL(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    String value =
        readSimpleField("value", readString(readBuffer, 8), WithOption.WithEncoding("UTF-8"));
    return new PlcCHAR(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)) { // List
    // char lists are surfaced as a list of single-character strings
    List<String> _value =
        readCountArrayField(
            "value", readString(readBuffer, 8), numberOfValues, WithOption.WithEncoding("UTF-8"));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (String _item : _value) {
      value.add(new PlcSTRING(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // WCHAR
    String value =
        readSimpleField("value", readString(readBuffer, 16), WithOption.WithEncoding("UTF-16"));
    return new PlcWCHAR(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)) { // List
    List<String> _value =
        readCountArrayField(
            "value",
            readString(readBuffer, 16),
            numberOfValues,
            WithOption.WithEncoding("UTF-16"));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (String _item : _value) {
      value.add(new PlcSTRING(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    String value =
        readSimpleField("value", readString(readBuffer, 8), WithOption.WithEncoding("UTF-8"));
    return new PlcCHAR(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)) { // List
    List<String> _value =
        readCountArrayField(
            "value", readString(readBuffer, 8), numberOfValues, WithOption.WithEncoding("UTF-8"));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (String _item : _value) {
      value.add(new PlcSTRING(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    String value =
        readSimpleField("value", readString(readBuffer, 16), WithOption.WithEncoding("UTF-16"));
    return new PlcCHAR(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)) { // List
    List<String> _value =
        readCountArrayField(
            "value",
            readString(readBuffer, 16),
            numberOfValues,
            WithOption.WithEncoding("UTF-16"));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (String _item : _value) {
      value.add(new PlcSTRING(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    String value =
        readSimpleField("value", readString(readBuffer, 8), WithOption.WithEncoding("UTF-8"));
    return new PlcCHAR(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)) { // List
    List<String> _value =
        readCountArrayField(
            "value", readString(readBuffer, 8), numberOfValues, WithOption.WithEncoding("UTF-8"));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (String _item : _value) {
      value.add(new PlcSTRING(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // List
    // NOTE(review): reads (4)*(8) = 32 one-byte items for a "4 byte" trailer, which looks like it
    // may over-read; this is generated code, so verify against the mspec rather than editing here.
    List<Short> _value =
        readCountArrayField("value", readUnsignedShort(readBuffer, 8), (4) * (8));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (short _item : _value) {
      value.add(new PlcUSINT(_item));
    }
    return new PlcList(value);
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)) { // List
    // NOTE(review): likewise reads numberOfValues * 32 one-byte items — confirm against the mspec.
    List<Short> _value =
        readCountArrayField("value", readUnsignedShort(readBuffer, 8), (numberOfValues) * (32));
    List<PlcValue> value = new ArrayList<>(_value.size());
    for (short _item : _value) {
      value.add(new PlcUSINT(_item));
    }
    return new PlcList(value);
  }
  // no branch matched the given data type
  return null;
}
/**
 * Returns the serialized length of the value in whole bytes, rounding the bit length up to the
 * next full byte. Generated code — DO NOT EDIT.
 */
public static int getLengthInBytes(
    PlcValue _value, ProfinetDataType dataType, Integer numberOfValues) {
  return (int) Math.ceil((float) getLengthInBits(_value, dataType, numberOfValues) / 8.0);
}
/**
 * Returns the serialized length of the value in bits, mirroring the branch structure of
 * {@code staticParse}: scalar branches contribute a fixed width, list branches contribute
 * per-element width times the actual list size (0 when {@code _value} is null).
 * Generated code — DO NOT EDIT.
 */
public static int getLengthInBits(
    PlcValue _value, ProfinetDataType dataType, Integer numberOfValues) {
  int lengthInBits = 0;
  if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BOOL
    // Simple field (value)
    lengthInBits += 1;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 1 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
    // Simple field (value)
    lengthInBits += 8;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BYTE
    // Simple field (value)
    lengthInBits += 8;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)) { // List
    // Array field: 1 bit per element, matching the bit-expanded list in staticParse
    if (_value != null) {
      lengthInBits += 1 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WORD)) { // WORD
    // Simple field (value)
    lengthInBits += 16;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DWORD)) { // DWORD
    // Simple field (value)
    lengthInBits += 32;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LWORD)) { // LWORD
    // Simple field (value)
    lengthInBits += 64;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // SINT
    // Simple field (value)
    lengthInBits += 8;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // INT
    // Simple field (value)
    lengthInBits += 16;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 16 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // DINT
    // Simple field (value)
    lengthInBits += 32;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 32 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LINT
    // Simple field (value)
    lengthInBits += 64;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 64 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
    // Simple field (value)
    lengthInBits += 8;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UINT
    // Simple field (value)
    lengthInBits += 16;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 16 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UDINT
    // Simple field (value)
    lengthInBits += 32;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 32 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // ULINT
    // Simple field (value)
    lengthInBits += 64;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 64 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // REAL
    // Simple field (value)
    lengthInBits += 32;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 32 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LREAL
    // Simple field (value)
    lengthInBits += 64;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 64 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    // Simple field (value)
    lengthInBits += 8;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // WCHAR
    // Simple field (value)
    lengthInBits += 16;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 16 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    // Simple field (value)
    lengthInBits += 8;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    // Simple field (value)
    lengthInBits += 16;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 16 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
    // Simple field (value)
    lengthInBits += 8;
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)
      && EvaluationHelper.equals(numberOfValues, (int) 1)) { // List
    // Array field: 8 bits per element, consistent with the byte-item lists read in staticParse
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)) { // List
    // Array field
    if (_value != null) {
      lengthInBits += 8 * _value.getList().size();
    }
  }
  return lengthInBits;
}
/**
 * Serializes the value using big-endian byte order; convenience overload delegating to the
 * byte-order-aware variant. Generated code — DO NOT EDIT.
 */
public static void staticSerialize(
    WriteBuffer writeBuffer, PlcValue _value, ProfinetDataType dataType, Integer numberOfValues)
    throws SerializationException {
  staticSerialize(writeBuffer, _value, dataType, numberOfValues, ByteOrder.BIG_ENDIAN);
}
public static void staticSerialize(
WriteBuffer writeBuffer,
PlcValue _value,
ProfinetDataType dataType,
Integer numberOfValues,
ByteOrder byteOrder)
throws SerializationException {
if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // BOOL
// Simple Field (value)
writeSimpleField("value", (boolean) _value.getBoolean(), writeBoolean(writeBuffer));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getBoolean).collect(Collectors.toList()),
writeBoolean(writeBuffer));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
// Simple Field (value)
writeSimpleField("value", (short) _value.getShort(), writeUnsignedShort(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
writeUnsignedShort(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // BYTE
// Simple Field (value)
writeSimpleField("value", (short) _value.getShort(), writeUnsignedShort(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getBoolean).collect(Collectors.toList()),
writeBoolean(writeBuffer));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WORD)) { // WORD
// Simple Field (value)
writeSimpleField("value", (int) _value.getInteger(), writeUnsignedInt(writeBuffer, 16));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.DWORD)) { // DWORD
// Simple Field (value)
writeSimpleField("value", (long) _value.getLong(), writeUnsignedLong(writeBuffer, 32));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LWORD)) { // LWORD
// Simple Field (value)
writeSimpleField(
"value", (BigInteger) _value.getBigInteger(), writeUnsignedBigInteger(writeBuffer, 64));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // SINT
// Simple Field (value)
writeSimpleField("value", (byte) _value.getByte(), writeSignedByte(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getByte).collect(Collectors.toList()),
writeSignedByte(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // INT
// Simple Field (value)
writeSimpleField("value", (short) _value.getShort(), writeSignedShort(writeBuffer, 16));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
writeSignedShort(writeBuffer, 16));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // DINT
// Simple Field (value)
writeSimpleField("value", (int) _value.getInteger(), writeSignedInt(writeBuffer, 32));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getInteger).collect(Collectors.toList()),
writeSignedInt(writeBuffer, 32));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // LINT
// Simple Field (value)
writeSimpleField("value", (long) _value.getLong(), writeSignedLong(writeBuffer, 64));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getLong).collect(Collectors.toList()),
writeSignedLong(writeBuffer, 64));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
// Simple Field (value)
writeSimpleField("value", (short) _value.getShort(), writeUnsignedShort(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
writeUnsignedShort(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // UINT
// Simple Field (value)
writeSimpleField("value", (int) _value.getInteger(), writeUnsignedInt(writeBuffer, 16));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getInteger).collect(Collectors.toList()),
writeUnsignedInt(writeBuffer, 16));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // UDINT
// Simple Field (value)
writeSimpleField("value", (long) _value.getLong(), writeUnsignedLong(writeBuffer, 32));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getLong).collect(Collectors.toList()),
writeUnsignedLong(writeBuffer, 32));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // ULINT
// Simple Field (value)
writeSimpleField(
"value", (BigInteger) _value.getBigInteger(), writeUnsignedBigInteger(writeBuffer, 64));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getBigInteger).collect(Collectors.toList()),
writeUnsignedBigInteger(writeBuffer, 64));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // REAL
// Simple Field (value)
writeSimpleField("value", (float) _value.getFloat(), writeFloat(writeBuffer, 32));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getFloat).collect(Collectors.toList()),
writeFloat(writeBuffer, 32));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // LREAL
// Simple Field (value)
writeSimpleField("value", (double) _value.getDouble(), writeDouble(writeBuffer, 64));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getDouble).collect(Collectors.toList()),
writeDouble(writeBuffer, 64));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
// Simple Field (value)
writeSimpleField(
"value",
(String) _value.getString(),
writeString(writeBuffer, 8),
WithOption.WithEncoding("UTF-8"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
writeString(writeBuffer, 8),
WithOption.WithEncoding("UTF-8"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // WCHAR
// Simple Field (value)
writeSimpleField(
"value",
(String) _value.getString(),
writeString(writeBuffer, 16),
WithOption.WithEncoding("UTF-16"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
writeString(writeBuffer, 16),
WithOption.WithEncoding("UTF-16"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
// Simple Field (value)
writeSimpleField(
"value",
(String) _value.getString(),
writeString(writeBuffer, 8),
WithOption.WithEncoding("UTF-8"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
writeString(writeBuffer, 8),
WithOption.WithEncoding("UTF-8"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
// Simple Field (value)
writeSimpleField(
"value",
(String) _value.getString(),
writeString(writeBuffer, 16),
WithOption.WithEncoding("UTF-16"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
writeString(writeBuffer, 16),
WithOption.WithEncoding("UTF-16"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
// Simple Field (value)
writeSimpleField(
"value",
(String) _value.getString(),
writeString(writeBuffer, 8),
WithOption.WithEncoding("UTF-8"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
writeString(writeBuffer, 8),
WithOption.WithEncoding("UTF-8"));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
writeUnsignedShort(writeBuffer, 8));
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)) { // List
// Array Field (value)
writeSimpleTypeArrayField(
"value",
_value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
writeUnsignedShort(writeBuffer, 8));
}
}
}
|
apache/solr | 35,342 | solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search;
import static org.apache.solr.core.SolrCore.verbose;
import static org.apache.solr.update.processor.DistributingUpdateProcessorFactory.DISTRIB_UPDATE_PARAM;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.util.Utils;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.util.RefCounted;
import org.apache.solr.util.TestHarness;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestRealTimeGet extends TestRTGBase {
@BeforeClass
public static void beforeClass() throws Exception {
  // Real-time get requires an update log (transaction log), so use the tlog-enabled config.
  initCore("solrconfig-tlog.xml", "schema_latest.xml");
}
@Test
public void testGetRealtime() throws Exception {
  clearIndex();
  assertU(commit());

  // Index one doc covering a wide spread of dynamic-field types: float/double/int/long/
  // string/boolean, each in plain, docValues (_d suffix), stored+docValues (_dS),
  // multiValued (_s), multiValued docValues (_ds) and stored multiValued docValues (_dsS)
  // flavors, so that RTG field retrieval from the tlog is exercised for all of them.
  assertU(
      adoc(
          "id", "1",
          "a_f", "-1.5",
          "a_fd", "-1.5",
          "a_fdS", "-1.5",
          "a_fs", "1.0",
          "a_fs", "2.5",
          "a_fds", "1.0",
          "a_fds", "2.5",
          "a_fdsS", "1.0",
          "a_fdsS", "2.5",
          "a_d", "-1.2E99",
          "a_dd", "-1.2E99",
          "a_ddS", "-1.2E99",
          "a_ds", "1.0",
          "a_ds", "2.5",
          "a_dds", "1.0",
          "a_dds", "2.5",
          "a_ddsS", "1.0",
          "a_ddsS", "2.5",
          "a_i", "-1",
          "a_id", "-1",
          "a_idS", "-1",
          "a_is", "1",
          "a_is", "2",
          "a_ids", "1",
          "a_ids", "2",
          "a_idsS", "1",
          "a_idsS", "2",
          "a_l", "-9999999999",
          "a_ld", "-9999999999",
          "a_ldS", "-9999999999",
          "a_ls", "1",
          "a_ls", "9999999999",
          "a_lds", "1",
          "a_lds", "9999999999",
          "a_ldsS", "1",
          "a_ldsS", "9999999999",
          "a_s", "abc",
          "a_sd", "bcd",
          "a_sdS", "cde",
          "a_ss", "def",
          "a_ss", "efg",
          "a_sds", "fgh",
          "a_sds", "ghi",
          "a_sdsS", "hij",
          "a_sdsS", "ijk",
          "a_b", "false",
          "a_bd", "true",
          "a_bdS", "false",
          "a_bs", "true",
          "a_bs", "false",
          "a_bds", "true",
          "a_bds", "false",
          "a_bdsS", "true",
          "a_bdsS", "false"));

  // Not committed yet: a normal search must not see the doc...
  assertJQ(req("q", "id:1"), "/response/numFound==0");

  // ...but /get must serve it straight from the transaction log, with every
  // field type and multi-valuedness preserved.
  assertJQ(
      req(
          "qt",
          "/get",
          "id",
          "1",
          "fl",
          "id, a_f,a_fd,a_fdS a_fs,a_fds,a_fdsS, "
              + "a_d,a_dd,a_ddS, a_ds,a_dds,a_ddsS, a_i,a_id,a_idS a_is,a_ids,a_idsS, "
              + "a_l,a_ld,a_ldS, a_ls,a_lds,a_ldsS, a_s,a_sd,a_sdS a_ss,a_sds,a_sdsS, "
              + "a_b,a_bd,a_bdS, a_bs,a_bds,a_bdsS"),
      "=={'doc':{'id':'1'"
          + ", a_f:-1.5, a_fd:-1.5, a_fdS:-1.5, a_fs:[1.0,2.5], a_fds:[1.0,2.5],a_fdsS:[1.0,2.5]"
          + ", a_d:-1.2E99, a_dd:-1.2E99, a_ddS:-1.2E99, a_ds:[1.0,2.5],a_dds:[1.0,2.5],a_ddsS:[1.0,2.5]"
          + ", a_i:-1, a_id:-1, a_idS:-1, a_is:[1,2],a_ids:[1,2],a_idsS:[1,2]"
          + ", a_l:-9999999999, a_ld:-9999999999, a_ldS:-9999999999, a_ls:[1,9999999999],a_lds:[1,9999999999],a_ldsS:[1,9999999999]"
          + ", a_s:'abc', a_sd:'bcd', a_sdS:'cde', a_ss:['def','efg'],a_sds:['fgh','ghi'],a_sdsS:['hij','ijk']"
          + ", a_b:false, a_bd:true, a_bdS:false, a_bs:[true,false],a_bds:[true,false],a_bdsS:[true,false]"
          + " }}");

  // "ids" (plural) uses the search-style response format even for a single id.
  assertJQ(
      req("qt", "/get", "ids", "1", "fl", "id"),
      "=={"
          + " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':["
          + " {"
          + " 'id':'1'}]"
          + " }}}");

  assertU(commit());

  assertJQ(req("q", "id:1"), "/response/numFound==1");

  // a cut-n-paste of the first big query, but this time it will be retrieved from the index
  // rather than the transaction log
  assertJQ(
      req(
          "qt",
          "/get",
          "id",
          "1",
          "fl",
          "id, a_f,a_fd,a_fdS a_fs,a_fds,a_fdsS, a_d,a_dd,a_ddS, a_ds,a_dds,a_ddsS, a_i,a_id,a_idS a_is,a_ids,a_idsS, a_l,a_ld,a_ldS a_ls,a_lds,a_ldsS"),
      "=={'doc':{'id':'1'"
          + ", a_f:-1.5, a_fd:-1.5, a_fdS:-1.5, a_fs:[1.0,2.5], a_fds:[1.0,2.5],a_fdsS:[1.0,2.5]"
          + ", a_d:-1.2E99, a_dd:-1.2E99, a_ddS:-1.2E99, a_ds:[1.0,2.5],a_dds:[1.0,2.5],a_ddsS:[1.0,2.5]"
          + ", a_i:-1, a_id:-1, a_idS:-1, a_is:[1,2],a_ids:[1,2],a_idsS:[1,2]"
          + ", a_l:-9999999999, a_ld:-9999999999, a_ldS:-9999999999, a_ls:[1,9999999999],a_lds:[1,9999999999],a_ldsS:[1,9999999999]"
          + " }}");

  assertJQ(req("qt", "/get", "id", "1", "fl", "id"), "=={'doc':{'id':'1'}}");
  assertJQ(
      req("qt", "/get", "ids", "1", "fl", "id"),
      "=={"
          + " 'response':{'numFound':1,'start':0,'numFoundExact':true,'docs':["
          + " {"
          + " 'id':'1'}]"
          + " }}}");

  // An uncommitted delete must be visible to /get immediately, while the regular
  // search still sees the last committed state.
  assertU(delI("1"));

  assertJQ(req("q", "id:1"), "/response/numFound==1"); // index still has the old commit
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':null}"); // but RTG sees the delete

  assertJQ(
      req("qt", "/get", "ids", "1"),
      "=={'response':{'numFound':0,'start':0,'numFoundExact':true,'docs':[]}}");

  assertU(adoc("id", "10"));
  assertU(adoc("id", "11"));
  assertJQ(req("qt", "/get", "id", "10", "fl", "id"), "=={'doc':{'id':'10'}}");
  // delete-by-query must also be reflected by RTG before a commit
  assertU(delQ("id:10 foo_s:abcdef"));
  assertJQ(req("qt", "/get", "id", "10"), "=={'doc':null}");
  assertJQ(req("qt", "/get", "id", "11", "fl", "id"), "=={'doc':{'id':'11'}}");

  // multivalued field
  assertU(adoc("id", "12", "val_ls", "1", "val_ls", "2"));
  assertJQ(req("q", "id:12"), "/response/numFound==0");
  assertJQ(
      req("qt", "/get", "id", "12", "fl", "id,val_ls"), "=={'doc':{'id':'12', 'val_ls':[1,2]}}");
  assertU(commit());
  assertJQ(
      req("qt", "/get", "id", "12", "fl", "id,val_ls"), "=={'doc':{'id':'12', 'val_ls':[1,2]}}");
  assertJQ(req("q", "id:12"), "/response/numFound==1");

  SolrQueryRequest req = req();
  RefCounted<SolrIndexSearcher> realtimeHolder = req.getCore().getRealtimeSearcher();

  //
  // filters
  //
  assertU(adoc("id", "12"));
  assertU(adoc("id", "13"));

  // this should not need to open another realtime searcher
  assertJQ(req("qt", "/get", "id", "11", "fl", "id", "fq", "id:11"), "=={doc:{id:'11'}}");

  // assert that the same realtime searcher is still in effect (i.e. that we didn't
  // open a new searcher when we didn't have to).
  RefCounted<SolrIndexSearcher> realtimeHolder2 = req.getCore().getRealtimeSearcher();
  assertEquals(
      realtimeHolder.get(),
      realtimeHolder2.get()); // Autocommit could possibly cause this to fail?
  realtimeHolder2.decref();

  // filter most likely different segment
  assertJQ(req("qt", "/get", "id", "12", "fl", "id", "fq", "id:11"), "=={doc:null}");
  // filter most likely same segment
  assertJQ(req("qt", "/get", "id", "12", "fl", "id", "fq", "id:13"), "=={doc:null}");
  assertJQ(req("qt", "/get", "id", "12", "fl", "id", "fq", "id:12"), "=={doc:{id:'12'}}");

  assertU(adoc("id", "14"));
  assertU(adoc("id", "15"));

  // id list, with some in index and some not, first id from index. Also test multiple fq params.
  assertJQ(
      req(
          "qt",
          "/get",
          "ids",
          "12,14,13,15",
          "fl",
          "id",
          "fq",
          "id:[10 TO 14]",
          "fq",
          "id:[13 TO 19]"),
      "/response/docs==[{id:'14'},{id:'13'}]");

  assertU(adoc("id", "16"));
  assertU(adoc("id", "17"));

  // id list, with some in index and some not, first id from tlog
  assertJQ(
      req("qt", "/get", "ids", "17,16,15,14", "fl", "id", "fq", "id:[15 TO 16]"),
      "/response/docs==[{id:'16'},{id:'15'}]");

  // more complex filter
  assertJQ(
      req("qt", "/get", "ids", "17,16,15,14", "fl", "id", "fq", "{!frange l=15 u=16}id"),
      "/response/docs==[{id:'16'},{id:'15'}]");

  // test with negative filter
  assertJQ(
      req("qt", "/get", "ids", "15,14", "fl", "id", "fq", "-id:15"),
      "/response/docs==[{id:'14'}]");
  assertJQ(
      req("qt", "/get", "ids", "17,16,15,14", "fl", "id", "fq", "-id:[15 TO 17]"),
      "/response/docs==[{id:'14'}]");

  realtimeHolder.decref();
  req.close();
}
/**
 * Verifies that _version_ tracking works through both the transaction log and the index, and
 * that reordered (stale) adds/deletes arriving FROM_LEADER are correctly ignored, including a
 * delete-by-query that logically sits between two out-of-order adds.
 */
@Test
public void testVersions() throws Exception {
  clearIndex();
  assertU(commit());

  long version = addAndGetVersion(sdoc("id", "1"), null);

  assertJQ(req("q", "id:1"), "/response/numFound==0");

  // test version is there from rtg
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}");

  // test version is there from the index
  assertU(commit());
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}");

  // simulate an update from the leader
  version += 10;
  updateJ(
      jsonAdd(sdoc("id", "1", "_version_", Long.toString(version))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));

  // test version is there from rtg
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}");

  // simulate reordering: test that a version less than that does not take effect
  updateJ(
      jsonAdd(sdoc("id", "1", "_version_", Long.toString(version - 1))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));

  // test that version hasn't changed
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}");

  // simulate reordering: test that a delete w/ version less than that does not take affect
  // TODO: also allow passing version on delete instead of on URL?
  updateJ(
      jsonDelId("1"),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(version - 1)));

  // test that version hasn't changed
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}");

  // make sure reordering detection also works after a commit
  assertU(commit());

  // simulate reordering: test that a version less than that does not take effect
  updateJ(
      jsonAdd(sdoc("id", "1", "_version_", Long.toString(version - 1))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));

  // test that version hasn't changed
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}");

  // simulate reordering: test that a delete operation w/ version less than that does not take
  // effect
  updateJ(
      jsonDelId("1"),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(version - 1)));

  // test that version hasn't changed
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + version + "}}");

  // now simulate a normal delete from the leader
  version += 5;
  updateJ(
      jsonDelId("1"),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(version)));

  // make sure a reordered add doesn't take effect.
  updateJ(
      jsonAdd(sdoc("id", "1", "_version_", Long.toString(version - 1))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));

  // test that it's still deleted
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':null}");

  // test that we can remember the version of a delete operation after a commit
  assertU(commit());

  // make sure a reordered add doesn't take effect.
  long version2 = deleteByQueryAndGetVersion("id:2", null);

  // test that it's still deleted
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':null}");

  version = addAndGetVersion(sdoc("id", "2"), null);
  version2 = deleteByQueryAndGetVersion("id:2", null);
  // DBQ versions are negative; their magnitude must exceed the add's version
  assertTrue(Math.abs(version2) > version);

  // test that it's deleted
  assertJQ(req("qt", "/get", "id", "2"), "=={'doc':null}");

  version2 = Math.abs(version2) + 1000;
  updateJ(
      jsonAdd(sdoc("id", "3", "_version_", Long.toString(version2 + 100))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
  updateJ(
      jsonAdd(sdoc("id", "4", "_version_", Long.toString(version2 + 200))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));

  // this should only affect id:3 so far
  deleteByQueryAndGetVersion(
      "id:(3 4 5 6)",
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER, "_version_", Long.toString(-(version2 + 150))));

  assertJQ(req("qt", "/get", "id", "3"), "=={'doc':null}");
  assertJQ(req("qt", "/get", "id", "4", "fl", "id"), "=={'doc':{'id':'4'}}");

  updateJ(
      jsonAdd(sdoc("id", "5", "_version_", Long.toString(version2 + 201))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));
  updateJ(
      jsonAdd(sdoc("id", "6", "_version_", Long.toString(version2 + 101))),
      params(DISTRIB_UPDATE_PARAM, FROM_LEADER));

  // the DBQ should also have caused id:6 to be removed
  assertJQ(req("qt", "/get", "id", "5", "fl", "id"), "=={'doc':{'id':'5'}}");
  assertJQ(req("qt", "/get", "id", "6"), "=={'doc':null}");

  assertU(commit());
}
/**
 * Verifies optimistic-concurrency semantics of the _version_ parameter on adds and deletes:
 * stale, negative, or too-high versions must be rejected with HTTP 409 (conflict), while the
 * current version (and the special -N "must not exist" form) must succeed.
 */
@Test
public void testOptimisticLocking() throws Exception {
  clearIndex();
  assertU(commit());

  final long version = addAndGetVersion(sdoc("id", "1"), null);
  long version2;

  // try version added directly on doc
  SolrException se =
      expectThrows(
          SolrException.class,
          "version should cause an error",
          () -> addAndGetVersion(sdoc("id", "1", "_version_", Long.toString(version - 1)), null));
  assertEquals("version should cause a conflict", 409, se.code());

  // try version added as a parameter on the request
  se =
      expectThrows(
          SolrException.class,
          "version should cause an error",
          () ->
              addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(version - 1))));
  assertEquals("version should cause a conflict", 409, se.code());

  // try an add specifying a negative version
  se =
      expectThrows(
          SolrException.class,
          "negative version should cause a conflict",
          () -> addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(-version))));
  assertEquals("version should cause a conflict", 409, se.code());

  // try an add with a greater version
  se =
      expectThrows(
          SolrException.class,
          "greater version should cause a conflict",
          () ->
              addAndGetVersion(
                  sdoc("id", "1"),
                  params("_version_", Long.toString(version + random().nextInt(1000) + 1))));
  assertEquals("version should cause a conflict", 409, se.code());

  //
  // deletes
  //

  // try a delete operation with version on the request
  se =
      expectThrows(
          SolrException.class,
          "version should cause an error",
          () -> deleteAndGetVersion("1", params("_version_", Long.toString(version - 1))));
  assertEquals("version should cause a conflict", 409, se.code());

  // try a delete operation with a negative version
  se =
      expectThrows(
          SolrException.class,
          "negative version should cause an error",
          () -> deleteAndGetVersion("1", params("_version_", Long.toString(-version))));
  assertEquals("version should cause a conflict", 409, se.code());

  // try a delete operation with a greater version
  se =
      expectThrows(
          SolrException.class,
          "greater version should cause an error",
          () ->
              deleteAndGetVersion(
                  "1", params("_version_", Long.toString(version + random().nextInt(1000) + 1))));
  assertEquals("version should cause a conflict", 409, se.code());

  // try a delete operation of a document that doesn't exist, specifying a specific version
  se =
      expectThrows(
          SolrException.class,
          "document does not exist should cause an error",
          () ->
              deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(version))));
  assertEquals("version should cause a conflict", 409, se.code());

  // try a delete operation of a document that doesn't exist, specifying that it should not
  // (-1 means "require the document to be absent"; the returned version is negative)
  version2 = deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(-1)));
  assertTrue(version2 < 0);

  // overwrite the document
  version2 = addAndGetVersion(sdoc("id", "1", "_version_", Long.toString(version)), null);
  assertTrue(version2 > version);

  // overwriting the previous version should now fail
  se =
      expectThrows(
          SolrException.class,
          "overwriting previous version should fail",
          () -> addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(version))));
  assertEquals(409, se.code());

  // deleting the previous version should now fail
  se =
      expectThrows(
          SolrException.class,
          "deleting the previous version should now fail",
          () -> deleteAndGetVersion("1", params("_version_", Long.toString(version))));
  assertEquals(409, se.code());

  final long prevVersion = version2;

  // deleting the current version should work
  version2 = deleteAndGetVersion("1", params("_version_", Long.toString(prevVersion)));

  // overwriting the previous existing doc should now fail (since it was deleted)
  se =
      expectThrows(
          SolrException.class,
          "overwriting the previous existing doc should now fail (since it was deleted)",
          () ->
              addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(prevVersion))));
  assertEquals(409, se.code());

  // deleting the previous existing doc should now fail (since it was deleted)
  se =
      expectThrows(
          SolrException.class,
          "deleting the previous existing doc should now fail (since it was deleted)",
          () -> deleteAndGetVersion("1", params("_version_", Long.toString(prevVersion))));
  assertEquals(409, se.code());

  // overwriting a negative version should work
  version2 =
      addAndGetVersion(sdoc("id", "1"), params("_version_", Long.toString(-(prevVersion - 1))));
  assertTrue(version2 > version);
  long lastVersion = version2;

  // sanity test that we see the right version via rtg
  assertJQ(req("qt", "/get", "id", "1"), "=={'doc':{'id':'1','_version_':" + lastVersion + "}}");
}
// @Test
// public void testGetRealtime() throws Exception {
// SolrQueryRequest sr1 = req("q","foo");
// IndexReader r1 = sr1.getCore().getRealtimeReader();
//
// assertU(adoc("id","1"));
//
// IndexReader r2 = sr1.getCore().getRealtimeReader();
// assertNotSame(r1, r2);
// int refcount = r2.getRefCount();
//
// // make sure a new reader wasn't opened
// IndexReader r3 = sr1.getCore().getRealtimeReader();
// assertSame(r2, r3);
// assertEquals(refcount+1, r3.getRefCount());
//
// assertU(commit());
//
// // this is not critical, but currently a commit does not refresh the reader
// // if nothing has changed
// IndexReader r4 = sr1.getCore().getRealtimeReader();
// assertEquals(refcount+2, r4.getRefCount());
//
//
// r1.decRef();
// r2.decRef();
// r3.decRef();
// r4.decRef();
// sr1.close();
// }
/**
 * Randomized stress test: concurrent writer threads add/delete/DBQ documents (sometimes with
 * optimistic concurrency, sometimes with deliberately bad versions) and commit, while reader
 * threads verify via /get (realtime) or normal search that observed values/versions never
 * violate the model's monotonicity invariants.
 *
 * <p>Fixes vs. the previous revision: the opt/correct coin flips used {@code rand.nextInt()}
 * (full signed-int range) compared against a 0-100 percentage, which made the probability ~50%
 * regardless of the configured percent; they now use {@code rand.nextInt(100)} like every other
 * percentage check in this method. The "installing new committedModel" log message also printed
 * the stale clock instead of the version being installed.
 */
@Test
public void testStressGetRealtime() throws Exception {
  clearIndex();
  assertU(commit());

  // req().getCore().getUpdateHandler().getIndexWriterProvider().getIndexWriter(req().getCore()).setInfoStream(System.out);

  final int commitPercent = 5 + random().nextInt(20);
  // what percent of the commits are soft
  final int softCommitPercent = 30 + random().nextInt(75);
  final int deletePercent = 4 + random().nextInt(25);
  final int deleteByQueryPercent = 1 + random().nextInt(5);
  // percent chance that an update uses optimistic locking
  final int optimisticPercent = 1 + random().nextInt(50);
  // percent chance that a version specified will be correct
  final int optimisticCorrectPercent = 25 + random().nextInt(70);
  // percent of time that a get will be filtered... we normally don't want too high.
  final int filteredGetPercent = random().nextInt(random().nextInt(20) + 1);
  final int ndocs = 5 + (random().nextBoolean() ? random().nextInt(25) : random().nextInt(200));
  int nWriteThreads = 5 + random().nextInt(25);

  // number of committers at a time...
  final int maxConcurrentCommits = nWriteThreads;

  // query variables
  final int percentRealtimeQuery = 60;
  final AtomicLong operations =
      new AtomicLong(50000); // number of query operations to perform in total
  int nReadThreads = 5 + random().nextInt(25);

  verbose("commitPercent=", commitPercent);
  verbose("softCommitPercent=", softCommitPercent);
  verbose("deletePercent=", deletePercent);
  verbose("deleteByQueryPercent=", deleteByQueryPercent);
  verbose("ndocs=", ndocs);
  verbose("nWriteThreads=", nWriteThreads);
  verbose("nReadThreads=", nReadThreads);
  verbose("percentRealtimeQuery=", percentRealtimeQuery);
  verbose("maxConcurrentCommits=", maxConcurrentCommits);
  verbose("operations=", operations);

  initModel(ndocs);

  final AtomicInteger numCommitting = new AtomicInteger();

  List<Thread> threads = new ArrayList<>();

  for (int i = 0; i < nWriteThreads; i++) {
    Thread thread =
        new Thread("WRITER" + i) {
          Random rand = new Random(random().nextInt());

          @Override
          public void run() {
            try {
              while (operations.get() > 0) {
                int oper = rand.nextInt(100);

                if (oper < commitPercent) {
                  if (numCommitting.incrementAndGet() <= maxConcurrentCommits) {
                    Map<Integer, DocInfo> newCommittedModel;
                    long version;

                    synchronized (TestRealTimeGet.this) {
                      newCommittedModel = new HashMap<>(model); // take a snapshot
                      version = snapshotCount++;
                      verbose("took snapshot version=", version);
                    }

                    if (rand.nextInt(100) < softCommitPercent) {
                      verbose("softCommit start");
                      assertU(TestHarness.commit("softCommit", "true"));
                      verbose("softCommit end");
                    } else {
                      verbose("hardCommit start");
                      assertU(commit());
                      verbose("hardCommit end");
                    }

                    synchronized (TestRealTimeGet.this) {
                      // install this model snapshot only if it's newer than the current one
                      if (version >= committedModelClock) {
                        if (VERBOSE) {
                          // log the snapshot version being installed (not the stale clock)
                          verbose("installing new committedModel version=" + version);
                        }
                        committedModel = newCommittedModel;
                        committedModelClock = version;
                      }
                    }
                  }
                  numCommitting.decrementAndGet();
                  continue;
                }

                int id = rand.nextInt(ndocs);
                Object sync = syncArr[id];

                // set the lastId before we actually change it sometimes to try and
                // uncover more race conditions between writing and reading
                boolean before = rand.nextBoolean();
                if (before) {
                  lastId = id;
                }

                // We can't concurrently update the same document and retain our invariants of
                // increasing values since we can't guarantee what order the updates will be
                // executed. Even with versions, we can't remove the sync because increasing
                // versions does not mean increasing vals.
                synchronized (sync) {
                  DocInfo info = model.get(id);
                  long val = info.val;
                  long nextVal = Math.abs(val) + 1;

                  if (oper < commitPercent + deletePercent) {
                    // bounded draws so optimisticPercent/optimisticCorrectPercent actually
                    // behave as percentages (previously rand.nextInt() made this ~50/50)
                    boolean opt = rand.nextInt(100) < optimisticPercent;
                    boolean correct = opt && rand.nextInt(100) < optimisticCorrectPercent;
                    long badVersion = correct ? 0 : badVersion(rand, info.version);

                    if (VERBOSE) {
                      if (!opt) {
                        verbose("deleting id", id, "val=", nextVal);
                      } else {
                        verbose(
                            "deleting id",
                            id,
                            "val=",
                            nextVal,
                            "existing_version=",
                            info.version,
                            (correct ? "" : (" bad_version=" + badVersion)));
                      }
                    }

                    // assertU("<delete><id>" + id + "</id></delete>");
                    Long version = null;

                    if (opt) {
                      if (correct) {
                        version =
                            deleteAndGetVersion(
                                Integer.toString(id),
                                params("_version_", Long.toString(info.version)));
                      } else {
                        SolrException se =
                            expectThrows(
                                SolrException.class,
                                "should not get random version",
                                () ->
                                    deleteAndGetVersion(
                                        Integer.toString(id),
                                        params("_version_", Long.toString(badVersion))));
                        assertEquals(409, se.code());
                      }
                    } else {
                      version = deleteAndGetVersion(Integer.toString(id), null);
                    }

                    if (version != null) {
                      model.put(id, new DocInfo(version, -nextVal));
                    }

                    if (VERBOSE) {
                      verbose("deleting id", id, "val=", nextVal, "DONE");
                    }
                  } else if (oper < commitPercent + deletePercent + deleteByQueryPercent) {
                    if (VERBOSE) {
                      verbose("deleteByQuery id ", id, "val=", nextVal);
                    }

                    assertU("<delete><query>id:" + id + "</query></delete>");
                    // -1 version: we can't track the DBQ's version through assertU
                    model.put(id, new DocInfo(-1L, -nextVal));
                    if (VERBOSE) {
                      verbose("deleteByQuery id", id, "val=", nextVal, "DONE");
                    }
                  } else {
                    // bounded draws, same fix as the delete branch above
                    boolean opt = rand.nextInt(100) < optimisticPercent;
                    boolean correct = opt && rand.nextInt(100) < optimisticCorrectPercent;
                    long badVersion = correct ? 0 : badVersion(rand, info.version);

                    if (VERBOSE) {
                      if (!opt) {
                        verbose("adding id", id, "val=", nextVal);
                      } else {
                        verbose(
                            "adding id",
                            id,
                            "val=",
                            nextVal,
                            "existing_version=",
                            info.version,
                            (correct ? "" : (" bad_version=" + badVersion)));
                      }
                    }

                    Long version = null;
                    SolrInputDocument sd =
                        sdoc("id", Integer.toString(id), FIELD, Long.toString(nextVal));

                    if (opt) {
                      if (correct) {
                        version =
                            addAndGetVersion(
                                sd, params("_version_", Long.toString(info.version)));
                      } else {
                        SolrException se =
                            expectThrows(
                                SolrException.class,
                                "should not get bad version",
                                () ->
                                    addAndGetVersion(
                                        sd, params("_version_", Long.toString(badVersion))));
                        assertEquals(409, se.code());
                      }
                    } else {
                      version = addAndGetVersion(sd, null);
                    }

                    if (version != null) {
                      model.put(id, new DocInfo(version, nextVal));
                    }

                    if (VERBOSE) {
                      verbose("adding id", id, "val=", nextVal, "DONE");
                    }
                  }
                } // end sync

                if (!before) {
                  lastId = id;
                }
              }
            } catch (Throwable e) {
              // abort all threads and fail the test
              operations.set(-1L);
              throw new RuntimeException(e);
            }
          }
        };

    threads.add(thread);
  }

  for (int i = 0; i < nReadThreads; i++) {
    Thread thread =
        new Thread("READER" + i) {
          Random rand = new Random(random().nextInt());

          @Override
          public void run() {
            try {
              while (operations.decrementAndGet() >= 0) {
                // bias toward a recently changed doc
                int id = rand.nextInt(100) < 25 ? lastId : rand.nextInt(ndocs);

                // when indexing, we update the index, then the model
                // so when querying, we should first check the model, and then the index
                boolean realTime = rand.nextInt(100) < percentRealtimeQuery;
                DocInfo info;

                if (realTime) {
                  info = model.get(id);
                } else {
                  synchronized (TestRealTimeGet.this) {
                    info = committedModel.get(id);
                  }
                }

                if (VERBOSE) {
                  verbose("querying id", id);
                }

                boolean filteredOut = false;
                SolrQueryRequest sreq;
                if (realTime) {
                  ModifiableSolrParams p =
                      params("wt", "json", "qt", "/get", "ids", Integer.toString(id));
                  if (rand.nextInt(100) < filteredGetPercent) {
                    int idToFilter = rand.nextBoolean() ? id : rand.nextInt(ndocs);
                    filteredOut = idToFilter != id;
                    p.add("fq", "id:" + idToFilter);
                  }
                  sreq = req(p);
                } else {
                  sreq =
                      req("wt", "json", "q", "id:" + Integer.toString(id), "omitHeader", "true");
                }

                String response = h.query(sreq);
                @SuppressWarnings({"rawtypes"})
                Map rsp = (Map) Utils.fromJSONString(response);
                @SuppressWarnings({"rawtypes"})
                List doclist = (List) (((Map) rsp.get("response")).get("docs"));
                if (doclist.size() == 0) {
                  // there's no info we can get back with a delete operation, so not much we can
                  // check
                  // without further synchronization. This is also correct when filteredOut==true
                } else {
                  assertEquals(1, doclist.size());
                  long foundVal = (Long) (((Map) doclist.get(0)).get(FIELD));
                  long foundVer = (Long) (((Map) doclist.get(0)).get("_version_"));
                  if (filteredOut
                      || foundVal < Math.abs(info.val)
                      || (foundVer == info.version
                          && foundVal != info.val)) { // if the version matches, the val must
                    verbose("ERROR, id=", id, "found=", response, "model", info);
                    fail();
                  }
                }
              }
            } catch (Throwable e) {
              // abort all threads and fail the test
              operations.set(-1L);
              throw new RuntimeException(e);
            }
          }
        };

    threads.add(thread);
  }

  for (Thread thread : threads) {
    thread.start();
  }

  for (Thread thread : threads) {
    thread.join();
  }
}
}
|
googleapis/google-cloud-java | 36,239 | java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/ListEnginesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/engine_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Response message for
* [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListEnginesResponse}
*/
public final class ListEnginesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ListEnginesResponse)
ListEnginesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEnginesResponse.newBuilder() to construct.
private ListEnginesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEnginesResponse() {
engines_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEnginesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListEnginesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListEnginesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ListEnginesResponse.class,
com.google.cloud.discoveryengine.v1.ListEnginesResponse.Builder.class);
}
public static final int ENGINES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1.Engine> engines_;
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.discoveryengine.v1.Engine> getEnginesList() {
return engines_;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.discoveryengine.v1.EngineOrBuilder>
getEnginesOrBuilderList() {
return engines_;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
@java.lang.Override
public int getEnginesCount() {
return engines_.size();
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.Engine getEngines(int index) {
return engines_.get(index);
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1.EngineOrBuilder getEnginesOrBuilder(int index) {
return engines_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < engines_.size(); i++) {
output.writeMessage(1, engines_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < engines_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, engines_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1.ListEnginesResponse)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1.ListEnginesResponse other =
(com.google.cloud.discoveryengine.v1.ListEnginesResponse) obj;
if (!getEnginesList().equals(other.getEnginesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEnginesCount() > 0) {
hash = (37 * hash) + ENGINES_FIELD_NUMBER;
hash = (53 * hash) + getEnginesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1.ListEnginesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [EngineService.ListEngines][google.cloud.discoveryengine.v1.EngineService.ListEngines]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ListEnginesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ListEnginesResponse)
com.google.cloud.discoveryengine.v1.ListEnginesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListEnginesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListEnginesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ListEnginesResponse.class,
com.google.cloud.discoveryengine.v1.ListEnginesResponse.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1.ListEnginesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (enginesBuilder_ == null) {
engines_ = java.util.Collections.emptyList();
} else {
engines_ = null;
enginesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1_ListEnginesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListEnginesResponse getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1.ListEnginesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListEnginesResponse build() {
com.google.cloud.discoveryengine.v1.ListEnginesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ListEnginesResponse buildPartial() {
com.google.cloud.discoveryengine.v1.ListEnginesResponse result =
new com.google.cloud.discoveryengine.v1.ListEnginesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.discoveryengine.v1.ListEnginesResponse result) {
if (enginesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
engines_ = java.util.Collections.unmodifiableList(engines_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.engines_ = engines_;
} else {
result.engines_ = enginesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.discoveryengine.v1.ListEnginesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1.ListEnginesResponse) {
return mergeFrom((com.google.cloud.discoveryengine.v1.ListEnginesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1.ListEnginesResponse other) {
if (other == com.google.cloud.discoveryengine.v1.ListEnginesResponse.getDefaultInstance())
return this;
if (enginesBuilder_ == null) {
if (!other.engines_.isEmpty()) {
if (engines_.isEmpty()) {
engines_ = other.engines_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEnginesIsMutable();
engines_.addAll(other.engines_);
}
onChanged();
}
} else {
if (!other.engines_.isEmpty()) {
if (enginesBuilder_.isEmpty()) {
enginesBuilder_.dispose();
enginesBuilder_ = null;
engines_ = other.engines_;
bitField0_ = (bitField0_ & ~0x00000001);
enginesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getEnginesFieldBuilder()
: null;
} else {
enginesBuilder_.addAllMessages(other.engines_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.discoveryengine.v1.Engine m =
input.readMessage(
com.google.cloud.discoveryengine.v1.Engine.parser(), extensionRegistry);
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(m);
} else {
enginesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.discoveryengine.v1.Engine> engines_ =
java.util.Collections.emptyList();
private void ensureEnginesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
engines_ = new java.util.ArrayList<com.google.cloud.discoveryengine.v1.Engine>(engines_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Engine,
com.google.cloud.discoveryengine.v1.Engine.Builder,
com.google.cloud.discoveryengine.v1.EngineOrBuilder>
enginesBuilder_;
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1.Engine> getEnginesList() {
if (enginesBuilder_ == null) {
return java.util.Collections.unmodifiableList(engines_);
} else {
return enginesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public int getEnginesCount() {
if (enginesBuilder_ == null) {
return engines_.size();
} else {
return enginesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Engine getEngines(int index) {
if (enginesBuilder_ == null) {
return engines_.get(index);
} else {
return enginesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder setEngines(int index, com.google.cloud.discoveryengine.v1.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.set(index, value);
onChanged();
} else {
enginesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder setEngines(
int index, com.google.cloud.discoveryengine.v1.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.set(index, builderForValue.build());
onChanged();
} else {
enginesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder addEngines(com.google.cloud.discoveryengine.v1.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.add(value);
onChanged();
} else {
enginesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder addEngines(int index, com.google.cloud.discoveryengine.v1.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.add(index, value);
onChanged();
} else {
enginesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder addEngines(com.google.cloud.discoveryengine.v1.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(builderForValue.build());
onChanged();
} else {
enginesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder addEngines(
int index, com.google.cloud.discoveryengine.v1.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(index, builderForValue.build());
onChanged();
} else {
enginesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder addAllEngines(
java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1.Engine> values) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, engines_);
onChanged();
} else {
enginesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder clearEngines() {
if (enginesBuilder_ == null) {
engines_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
enginesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public Builder removeEngines(int index) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.remove(index);
onChanged();
} else {
enginesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Engine.Builder getEnginesBuilder(int index) {
return getEnginesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.EngineOrBuilder getEnginesOrBuilder(int index) {
if (enginesBuilder_ == null) {
return engines_.get(index);
} else {
return enginesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public java.util.List<? extends com.google.cloud.discoveryengine.v1.EngineOrBuilder>
getEnginesOrBuilderList() {
if (enginesBuilder_ != null) {
return enginesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(engines_);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Engine.Builder addEnginesBuilder() {
return getEnginesFieldBuilder()
.addBuilder(com.google.cloud.discoveryengine.v1.Engine.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1.Engine.Builder addEnginesBuilder(int index) {
return getEnginesFieldBuilder()
.addBuilder(index, com.google.cloud.discoveryengine.v1.Engine.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1.Engine engines = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1.Engine.Builder>
getEnginesBuilderList() {
return getEnginesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Engine,
com.google.cloud.discoveryengine.v1.Engine.Builder,
com.google.cloud.discoveryengine.v1.EngineOrBuilder>
getEnginesFieldBuilder() {
if (enginesBuilder_ == null) {
enginesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1.Engine,
com.google.cloud.discoveryengine.v1.Engine.Builder,
com.google.cloud.discoveryengine.v1.EngineOrBuilder>(
engines_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
engines_ = null;
}
return enginesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ListEnginesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ListEnginesResponse)
private static final com.google.cloud.discoveryengine.v1.ListEnginesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ListEnginesResponse();
}
public static com.google.cloud.discoveryengine.v1.ListEnginesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListEnginesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListEnginesResponse>() {
@java.lang.Override
public ListEnginesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
  /** Returns the shared parser for {@code ListEnginesResponse} messages. */
  public static com.google.protobuf.Parser<ListEnginesResponse> parser() {
    return PARSER;
  }
  // Instance-level accessor required by the Message contract; returns the same shared PARSER.
  @java.lang.Override
  public com.google.protobuf.Parser<ListEnginesResponse> getParserForType() {
    return PARSER;
  }
  // Instance-level accessor for the shared default instance (Message contract).
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1.ListEnginesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/streampark | 36,567 | streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/FlinkApplicationBuildPipelineServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.streampark.console.core.service.application.impl;
import org.apache.streampark.common.conf.Workspace;
import org.apache.streampark.common.constants.Constants;
import org.apache.streampark.common.enums.FlinkDeployMode;
import org.apache.streampark.common.enums.FlinkJobType;
import org.apache.streampark.common.fs.FsOperator;
import org.apache.streampark.common.util.AssertUtils;
import org.apache.streampark.common.util.ExceptionUtils;
import org.apache.streampark.common.util.FileUtils;
import org.apache.streampark.console.base.exception.ApiAlertException;
import org.apache.streampark.console.base.util.JacksonUtils;
import org.apache.streampark.console.base.util.WebUtils;
import org.apache.streampark.console.core.bean.Dependency;
import org.apache.streampark.console.core.bean.DockerConfig;
import org.apache.streampark.console.core.entity.ApplicationBuildPipeline;
import org.apache.streampark.console.core.entity.ApplicationLog;
import org.apache.streampark.console.core.entity.FlinkApplication;
import org.apache.streampark.console.core.entity.FlinkApplicationConfig;
import org.apache.streampark.console.core.entity.FlinkEnv;
import org.apache.streampark.console.core.entity.FlinkSql;
import org.apache.streampark.console.core.entity.Message;
import org.apache.streampark.console.core.entity.Resource;
import org.apache.streampark.console.core.enums.CandidateTypeEnum;
import org.apache.streampark.console.core.enums.NoticeTypeEnum;
import org.apache.streampark.console.core.enums.OptionStateEnum;
import org.apache.streampark.console.core.enums.ReleaseStateEnum;
import org.apache.streampark.console.core.enums.ResourceTypeEnum;
import org.apache.streampark.console.core.mapper.ApplicationBuildPipelineMapper;
import org.apache.streampark.console.core.service.FlinkEnvService;
import org.apache.streampark.console.core.service.FlinkSqlService;
import org.apache.streampark.console.core.service.MessageService;
import org.apache.streampark.console.core.service.ResourceService;
import org.apache.streampark.console.core.service.SettingService;
import org.apache.streampark.console.core.service.application.ApplicationLogService;
import org.apache.streampark.console.core.service.application.FlinkApplicationActionService;
import org.apache.streampark.console.core.service.application.FlinkApplicationBackupService;
import org.apache.streampark.console.core.service.application.FlinkApplicationBuildPipelineService;
import org.apache.streampark.console.core.service.application.FlinkApplicationConfigService;
import org.apache.streampark.console.core.service.application.FlinkApplicationInfoService;
import org.apache.streampark.console.core.service.application.FlinkApplicationManageService;
import org.apache.streampark.console.core.util.ServiceHelper;
import org.apache.streampark.console.core.watcher.FlinkAppHttpWatcher;
import org.apache.streampark.flink.packer.docker.DockerConf;
import org.apache.streampark.flink.packer.maven.Artifact;
import org.apache.streampark.flink.packer.maven.DependencyInfo;
import org.apache.streampark.flink.packer.pipeline.BuildPipeline;
import org.apache.streampark.flink.packer.pipeline.BuildResult;
import org.apache.streampark.flink.packer.pipeline.DockerBuildSnapshot;
import org.apache.streampark.flink.packer.pipeline.DockerProgressWatcher;
import org.apache.streampark.flink.packer.pipeline.DockerPullSnapshot;
import org.apache.streampark.flink.packer.pipeline.DockerPushSnapshot;
import org.apache.streampark.flink.packer.pipeline.DockerResolvedSnapshot;
import org.apache.streampark.flink.packer.pipeline.FlinkK8sApplicationBuildRequest;
import org.apache.streampark.flink.packer.pipeline.FlinkK8sSessionBuildRequest;
import org.apache.streampark.flink.packer.pipeline.FlinkRemotePerJobBuildRequest;
import org.apache.streampark.flink.packer.pipeline.FlinkYarnApplicationBuildRequest;
import org.apache.streampark.flink.packer.pipeline.PipeWatcher;
import org.apache.streampark.flink.packer.pipeline.PipelineSnapshot;
import org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum;
import org.apache.streampark.flink.packer.pipeline.PipelineTypeEnum;
import org.apache.streampark.flink.packer.pipeline.impl.FlinkK8sApplicationBuildPipeline;
import org.apache.streampark.flink.packer.pipeline.impl.FlinkK8sSessionBuildPipeline;
import org.apache.streampark.flink.packer.pipeline.impl.FlinkRemoteBuildPipeline;
import org.apache.streampark.flink.packer.pipeline.impl.FlinkYarnApplicationBuildPipeline;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.apache.streampark.common.enums.ApplicationType.APACHE_FLINK;
import static org.apache.streampark.console.core.enums.OperationEnum.RELEASE;
@Service
@Slf4j
@Transactional(propagation = Propagation.SUPPORTS, rollbackFor = Exception.class)
public class FlinkApplicationBuildPipelineServiceImpl
extends
ServiceImpl<ApplicationBuildPipelineMapper, ApplicationBuildPipeline>
implements
FlinkApplicationBuildPipelineService {
    // ---- collaborating services (Spring-injected) ----
    @Autowired
    private FlinkEnvService flinkEnvService;
    @Autowired
    private FlinkSqlService flinkSqlService;
    @Autowired
    private FlinkApplicationBackupService backUpService;
    @Autowired
    private SettingService settingService;
    @Autowired
    private MessageService messageService;
    @Autowired
    private FlinkApplicationManageService applicationManageService;
    @Autowired
    private FlinkApplicationInfoService applicationInfoService;
    @Autowired
    private ApplicationLogService applicationLogService;
    @Autowired
    private FlinkAppHttpWatcher flinkAppHttpWatcher;
    @Autowired
    private FlinkApplicationConfigService applicationConfigService;
    @Autowired
    private ResourceService resourceService;
    // Dedicated executor so build pipelines do not block request threads.
    @Qualifier("streamparkBuildPipelineExecutor")
    @Autowired
    private ExecutorService executorService;
    // Per-application docker progress snapshots (pull/build/push), keyed by app id;
    // entries expire 30 days after the last write.
    private static final Cache<Long, DockerPullSnapshot> DOCKER_PULL_PG_SNAPSHOTS = Caffeine.newBuilder()
        .expireAfterWrite(30, TimeUnit.DAYS).build();
    private static final Cache<Long, DockerBuildSnapshot> DOCKER_BUILD_PG_SNAPSHOTS = Caffeine.newBuilder()
        .expireAfterWrite(30, TimeUnit.DAYS).build();
    private static final Cache<Long, DockerPushSnapshot> DOCKER_PUSH_PG_SNAPSHOTS = Caffeine.newBuilder()
        .expireAfterWrite(30, TimeUnit.DAYS).build();
    /**
     * Build application. This is an async call method.
     *
     * <p>Registers a pipeline watcher that persists each pipeline snapshot and, on finish,
     * updates the application's release state, writes the release log, and (on success)
     * promotes candidate SQL/config to effective and backs the application up.
     *
     * @param appId application id
     * @param forceBuild forced start pipeline or not
     * @return Whether the pipeline was successfully started
     */
    @Override
    public boolean buildApplication(@Nonnull Long appId, boolean forceBuild) {
        // check the build environment
        checkBuildEnv(appId, forceBuild);
        FlinkApplication app = applicationManageService.getById(appId);
        ApplicationLog applicationLog = getApplicationLog(app);
        // check if you need to go through the build process (if the jar and pom have changed,
        // you need to go through the build process, if other common parameters are modified,
        // you don't need to go through the build process)
        boolean needBuild = applicationManageService.checkBuildAndUpdate(app);
        if (!needBuild) {
            applicationLog.setSuccess(true);
            applicationLogService.save(applicationLog);
            return true;
        }
        // rollback
        if (app.isNeedRollback() && app.isFlinkSql()) {
            flinkSqlService.rollback(app);
        }
        // 1) flink sql setDependency
        // FLINK_SQL / PYFLINK jobs carry their dependencies on the candidate (or, failing
        // that, the currently effective) FlinkSql record.
        FlinkSql newFlinkSql = flinkSqlService.getCandidate(app.getId(), CandidateTypeEnum.NEW);
        FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(app.getId(), false);
        FlinkJobType jobType = app.getJobTypeEnum();
        if (jobType == FlinkJobType.FLINK_SQL || jobType == FlinkJobType.PYFLINK) {
            FlinkSql flinkSql = newFlinkSql == null ? effectiveFlinkSql : newFlinkSql;
            AssertUtils.notNull(flinkSql);
            app.setDependency(flinkSql.getDependency());
            app.setTeamResource(flinkSql.getTeamResource());
        }
        // create pipeline instance
        BuildPipeline pipeline = createPipelineInstance(app);
        // clear history
        removeByAppId(app.getId());
        // register pipeline progress event watcher.
        // save snapshot of pipeline to db when status of pipeline was changed.
        pipeline.registerWatcher(
            new PipeWatcher() {
                @Override
                public void onStart(PipelineSnapshot snapshot) {
                    ApplicationBuildPipeline buildPipeline = ApplicationBuildPipeline.fromPipeSnapshot(snapshot)
                        .setAppId(app.getId());
                    saveEntity(buildPipeline);
                    app.setRelease(ReleaseStateEnum.RELEASING.get());
                    applicationManageService.updateRelease(app);
                    if (flinkAppHttpWatcher.isWatchingApp(app.getId())) {
                        flinkAppHttpWatcher.init();
                    }
                    // 1) checkEnv
                    applicationInfoService.checkEnv(app);
                    // 2) some preparatory work
                    String appUploads = app.getWorkspace().APP_UPLOADS();
                    if (app.isFlinkJarOrPyFlink()) {
                        // flinkJar upload jar to appHome...
                        String appHome = app.getAppHome();
                        FsOperator fsOperator = app.getFsOperator();
                        fsOperator.delete(appHome);
                        if (app.isUploadResource()) {
                            String uploadJar = appUploads.concat("/").concat(app.getJar());
                            File localJar = new File(
                                String.format(
                                    "%s/%d/%s",
                                    Workspace.local().APP_UPLOADS(),
                                    app.getTeamId(),
                                    app.getJar()));
                            // fall back to a team resource or the temp dir when the jar is
                            // not present in the local team upload directory
                            if (!localJar.exists()) {
                                Resource resource = resourceService.findByResourceName(app.getTeamId(),
                                    app.getJar());
                                if (resource != null && StringUtils.isNotBlank(resource.getFilePath())) {
                                    localJar = new File(resource.getFilePath());
                                    uploadJar = appUploads.concat("/").concat(localJar.getName());
                                } else {
                                    localJar =
                                        new File(WebUtils.getAppTempDir(), app.getJar());
                                    uploadJar = appUploads.concat("/").concat(localJar.getName());
                                }
                            }
                            // upload jar copy to appHome
                            checkOrElseUploadJar(app.getFsOperator(), localJar, uploadJar, appUploads);
                            switch (app.getApplicationType()) {
                                case STREAMPARK_FLINK:
                                    fsOperator.mkdirs(app.getAppLib());
                                    fsOperator.copy(uploadJar, app.getAppLib(), false, true);
                                    break;
                                case APACHE_FLINK:
                                    fsOperator.mkdirs(appHome);
                                    fsOperator.copy(uploadJar, appHome, false, true);
                                    break;
                                default:
                                    throw new IllegalArgumentException(
                                        "[StreamPark] unsupported ApplicationType of FlinkJar: "
                                            + app.getApplicationType());
                            }
                        } else {
                            fsOperator.upload(app.getDistHome(), appHome);
                        }
                    } else {
                        if (!app.getDependencyObject().getJar().isEmpty()) {
                            String localUploads = Workspace.local().APP_UPLOADS();
                            // copy jar to local upload dir
                            for (String jar : app.getDependencyObject().getJar()) {
                                File localJar = new File(WebUtils.getAppTempDir(), jar);
                                File uploadJar = new File(localUploads, jar);
                                if (!localJar.exists() && !uploadJar.exists()) {
                                    throw new ApiAlertException(
                                        "Missing file: " + jar + ", please upload again");
                                }
                                if (localJar.exists()) {
                                    checkOrElseUploadJar(
                                        FsOperator.lfs(), localJar, uploadJar.getAbsolutePath(),
                                        localUploads);
                                }
                            }
                        }
                    }
                }
                @Override
                public void onStepStateChange(PipelineSnapshot snapshot) {
                    ApplicationBuildPipeline buildPipeline = ApplicationBuildPipeline.fromPipeSnapshot(snapshot)
                        .setAppId(app.getId());
                    saveEntity(buildPipeline);
                }
                @Override
                public void onFinish(PipelineSnapshot snapshot, BuildResult result) {
                    ApplicationBuildPipeline buildPipeline = ApplicationBuildPipeline.fromPipeSnapshot(snapshot)
                        .setAppId(app.getId())
                        .setBuildResult(result);
                    saveEntity(buildPipeline);
                    if (result.pass()) {
                        // running job ...
                        if (app.isRunning()) {
                            app.setRelease(ReleaseStateEnum.NEED_RESTART.get());
                        } else {
                            app.setOptionState(OptionStateEnum.NONE.getValue());
                            app.setRelease(ReleaseStateEnum.DONE.get());
                            // If the current task is not running, or the task has just been added, directly
                            // set
                            // the candidate version to the official version
                            if (app.isFlinkSql()) {
                                applicationManageService.toEffective(app);
                            } else {
                                if (app.isStreamParkType()) {
                                    FlinkApplicationConfig config =
                                        applicationConfigService.getLatest(app.getId());
                                    if (config != null) {
                                        config.setToApplication(app);
                                        applicationConfigService.toEffective(app.getId(),
                                            app.getConfigId());
                                    }
                                }
                            }
                        }
                        // backup.
                        if (!app.isNeedRollback()) {
                            if (app.isFlinkSql() && newFlinkSql != null) {
                                backUpService.backup(app, newFlinkSql);
                            } else {
                                backUpService.backup(app, null);
                            }
                        }
                        applicationLog.setSuccess(true);
                        app.setBuild(false);
                    } else {
                        // build failed: notify the owner and flag the app for a re-build
                        Message message = new Message(
                            ServiceHelper.getUserId(),
                            app.getId(),
                            app.getJobName().concat(" release failed"),
                            ExceptionUtils.stringifyException(snapshot.error().exception()),
                            NoticeTypeEnum.EXCEPTION);
                        messageService.push(message);
                        app.setRelease(ReleaseStateEnum.FAILED.get());
                        app.setOptionState(OptionStateEnum.NONE.getValue());
                        app.setBuild(true);
                        applicationLog.setException(
                            ExceptionUtils.stringifyException(snapshot.error().exception()));
                        applicationLog.setSuccess(false);
                    }
                    applicationManageService.updateRelease(app);
                    applicationLogService.save(applicationLog);
                    if (flinkAppHttpWatcher.isWatchingApp(app.getId())) {
                        flinkAppHttpWatcher.init();
                    }
                }
            });
        // save docker resolve progress detail to cache, only for flink-k8s application mode.
        if (PipelineTypeEnum.FLINK_NATIVE_K8S_APPLICATION == pipeline.pipeType()) {
            registerDockerProgressWatcher(pipeline, app);
        }
        // save pipeline instance snapshot to db before release it.
        ApplicationBuildPipeline buildPipeline =
            ApplicationBuildPipeline.initFromPipeline(pipeline).setAppId(app.getId());
        boolean saved = saveEntity(buildPipeline);
        // drop stale docker progress left over from a previous build of this app
        DOCKER_PULL_PG_SNAPSHOTS.invalidate(app.getId());
        DOCKER_BUILD_PG_SNAPSHOTS.invalidate(app.getId());
        DOCKER_PUSH_PG_SNAPSHOTS.invalidate(app.getId());
        // async release pipeline
        executorService.submit((Runnable) pipeline::launch);
        return saved;
    }
private void registerDockerProgressWatcher(BuildPipeline pipeline, FlinkApplication app) {
pipeline
.as(FlinkK8sApplicationBuildPipeline.class)
.registerDockerProgressWatcher(
new DockerProgressWatcher() {
@Override
public void onDockerPullProgressChange(DockerPullSnapshot snapshot) {
DOCKER_PULL_PG_SNAPSHOTS.put(app.getId(), snapshot);
}
@Override
public void onDockerBuildProgressChange(DockerBuildSnapshot snapshot) {
DOCKER_BUILD_PG_SNAPSHOTS.put(app.getId(), snapshot);
}
@Override
public void onDockerPushProgressChange(DockerPushSnapshot snapshot) {
DOCKER_PUSH_PG_SNAPSHOTS.put(app.getId(), snapshot);
}
});
}
@Nonnull
private ApplicationLog getApplicationLog(FlinkApplication app) {
ApplicationLog applicationLog = new ApplicationLog();
applicationLog.setOptionName(RELEASE.getValue());
applicationLog.setAppId(app.getId());
applicationLog.setCreateTime(new Date());
applicationLog.setUserId(ServiceHelper.getUserId());
return applicationLog;
}
/**
* check the build environment
*
* @param appId application id
* @param forceBuild forced start pipeline or not
*/
private void checkBuildEnv(Long appId, boolean forceBuild) {
FlinkApplication app = applicationManageService.getById(appId);
// 1) check flink version
String checkEnvErrorMessage = "Check flink env failed, please check the flink version of this job";
FlinkEnv env = flinkEnvService.getByIdOrDefault(app.getVersionId());
ApiAlertException.throwIfNull(env, checkEnvErrorMessage);
boolean checkVersion = env.getFlinkVersion().checkVersion(false);
ApiAlertException.throwIfFalse(
checkVersion, "Unsupported flink version:" + env.getFlinkVersion().version());
// 2) check env
boolean envOk = applicationInfoService.checkEnv(app);
ApiAlertException.throwIfFalse(envOk, checkEnvErrorMessage);
// 3) Whether the application can currently start a new building progress
ApiAlertException.throwIfTrue(
!forceBuild && !allowToBuildNow(appId),
"The job is invalid, or the job cannot be built while it is running");
}
    /**
     * create building pipeline instance
     *
     * <p>Resolves the user jar for the job, then picks the pipeline implementation from
     * the deploy mode: YARN application, remote/per-job/session, K8s session, or K8s
     * application (which additionally needs the docker registry configuration).
     */
    private BuildPipeline createPipelineInstance(@Nonnull FlinkApplication app) {
        FlinkEnv flinkEnv = flinkEnvService.getByIdOrDefault(app.getVersionId());
        String flinkUserJar = retrieveFlinkUserJar(flinkEnv, app);
        // fall back to an uploaded team resource with the same name when the expected jar is absent
        if (!FileUtils.exists(flinkUserJar)) {
            Resource resource = resourceService.findByResourceName(app.getTeamId(), app.getJar());
            if (resource != null && StringUtils.isNotBlank(resource.getFilePath())) {
                flinkUserJar = resource.getFilePath();
            }
        }
        FlinkDeployMode deployModeEnum = app.getDeployModeEnum();
        String mainClass = Constants.STREAMPARK_FLINKSQL_CLIENT_CLASS;
        switch (deployModeEnum) {
            case YARN_APPLICATION:
                String yarnProvidedPath = app.getAppLib();
                String localWorkspace = app.getLocalAppHome().concat("/lib");
                // plain Apache Flink jar jobs ship the whole app home, not just lib/
                if (FlinkJobType.FLINK_JAR == app.getJobTypeEnum()
                    && APACHE_FLINK == app.getApplicationType()) {
                    yarnProvidedPath = app.getAppHome();
                    localWorkspace = app.getLocalAppHome();
                }
                FlinkYarnApplicationBuildRequest yarnAppRequest = buildFlinkYarnApplicationBuildRequest(app, mainClass,
                    localWorkspace, yarnProvidedPath);
                log.info("Submit params to building pipeline : {}", yarnAppRequest);
                return FlinkYarnApplicationBuildPipeline.of(yarnAppRequest);
            case YARN_PER_JOB:
            case YARN_SESSION:
            case REMOTE:
                FlinkRemotePerJobBuildRequest buildRequest = buildFlinkRemotePerJobBuildRequest(app, mainClass,
                    flinkUserJar, flinkEnv);
                log.info("Submit params to building pipeline : {}", buildRequest);
                return FlinkRemoteBuildPipeline.of(buildRequest);
            case KUBERNETES_NATIVE_SESSION:
                FlinkK8sSessionBuildRequest k8sSessionBuildRequest = buildFlinkK8sSessionBuildRequest(app, mainClass,
                    flinkUserJar, flinkEnv);
                log.info("Submit params to building pipeline : {}", k8sSessionBuildRequest);
                return FlinkK8sSessionBuildPipeline.of(k8sSessionBuildRequest);
            case KUBERNETES_NATIVE_APPLICATION:
                DockerConfig dockerConfig = settingService.getDockerConfig();
                FlinkK8sApplicationBuildRequest k8sApplicationBuildRequest = buildFlinkK8sApplicationBuildRequest(
                    app, mainClass, flinkUserJar, flinkEnv, dockerConfig);
                log.info("Submit params to building pipeline : {}", k8sApplicationBuildRequest);
                return FlinkK8sApplicationBuildPipeline.of(k8sApplicationBuildRequest);
            default:
                throw new UnsupportedOperationException(
                    "Unsupported Building Application for DeployMode: " + app.getDeployModeEnum());
        }
    }
@Nonnull
private FlinkYarnApplicationBuildRequest buildFlinkYarnApplicationBuildRequest(
@Nonnull FlinkApplication app,
String mainClass,
String localWorkspace,
String yarnProvidedPath) {
return new FlinkYarnApplicationBuildRequest(
app.getJobName(),
mainClass,
localWorkspace,
yarnProvidedPath,
app.getJobTypeEnum(),
getMergedDependencyInfo(app));
}
@Nonnull
private FlinkK8sApplicationBuildRequest buildFlinkK8sApplicationBuildRequest(
@Nonnull FlinkApplication app,
String mainClass,
String flinkUserJar,
FlinkEnv flinkEnv,
DockerConfig dockerConfig) {
FlinkK8sApplicationBuildRequest k8sApplicationBuildRequest = new FlinkK8sApplicationBuildRequest(
app.getJobName(),
app.getLocalAppHome(),
mainClass,
flinkUserJar,
app.getDeployModeEnum(),
app.getJobTypeEnum(),
flinkEnv.getFlinkVersion(),
getMergedDependencyInfo(app),
app.getJobName(),
app.getK8sNamespace(),
app.getFlinkImage(),
app.getK8sPodTemplates(),
app.getK8sHadoopIntegration() != null ? app.getK8sHadoopIntegration() : false,
DockerConf.of(
dockerConfig.getAddress(),
dockerConfig.getNamespace(),
dockerConfig.getUsername(),
dockerConfig.getPassword()),
app.getIngressTemplate());
return k8sApplicationBuildRequest;
}
@Nonnull
private FlinkK8sSessionBuildRequest buildFlinkK8sSessionBuildRequest(
@Nonnull FlinkApplication app,
String mainClass,
String flinkUserJar, FlinkEnv flinkEnv) {
FlinkK8sSessionBuildRequest k8sSessionBuildRequest = new FlinkK8sSessionBuildRequest(
app.getJobName(),
app.getLocalAppHome(),
mainClass,
flinkUserJar,
app.getDeployModeEnum(),
app.getJobTypeEnum(),
flinkEnv.getFlinkVersion(),
getMergedDependencyInfo(app),
app.getClusterId(),
app.getK8sNamespace());
return k8sSessionBuildRequest;
}
@Nonnull
private FlinkRemotePerJobBuildRequest buildFlinkRemotePerJobBuildRequest(
@Nonnull FlinkApplication app,
String mainClass,
String flinkUserJar, FlinkEnv flinkEnv) {
return new FlinkRemotePerJobBuildRequest(
app.getJobName(),
app.getLocalAppHome(),
mainClass,
flinkUserJar,
app.isFlinkJar(),
app.getDeployModeEnum(),
app.getJobTypeEnum(),
flinkEnv.getFlinkVersion(),
getMergedDependencyInfo(app));
}
    /**
     * copy from {@link FlinkApplicationActionService#start(FlinkApplication, boolean)}
     *
     * <p>Resolves the path of the user jar to submit, based on the job type, the
     * application type, and (for FLINK_SQL) the deploy mode.
     */
    private String retrieveFlinkUserJar(FlinkEnv flinkEnv, FlinkApplication app) {
        switch (app.getJobTypeEnum()) {
            case FLINK_JAR:
                switch (app.getApplicationType()) {
                    case STREAMPARK_FLINK:
                        return String.format(
                            "%s/%s", app.getAppLib(), app.getModule().concat(Constants.JAR_SUFFIX));
                    case APACHE_FLINK:
                        return String.format("%s/%s", app.getAppHome(), app.getJar());
                    default:
                        throw new IllegalArgumentException(
                            "[StreamPark] unsupported ApplicationType of FlinkJar: "
                                + app.getApplicationType());
                }
            case PYFLINK:
                return String.format("%s/%s", app.getAppHome(), app.getJar());
            case FLINK_SQL:
                String sqlDistJar = ServiceHelper.getFlinkSqlClientJar(flinkEnv);
                // YARN application mode reads the SQL client jar from the remote workspace;
                // all other modes use the local client directory.
                if (app.getDeployModeEnum() == FlinkDeployMode.YARN_APPLICATION) {
                    String clientPath = Workspace.remote().APP_CLIENT();
                    return String.format("%s/%s", clientPath, sqlDistJar);
                }
                return Workspace.local().APP_CLIENT().concat("/").concat(sqlDistJar);
            default:
                throw new UnsupportedOperationException(
                    "[StreamPark] unsupported JobType: " + app.getJobTypeEnum());
        }
    }
@Override
public Optional<ApplicationBuildPipeline> getCurrentBuildPipeline(@Nonnull Long appId) {
return Optional.ofNullable(getById(appId));
}
    /** Combines the cached docker pull/build/push progress snapshots for one application. */
    @Override
    public DockerResolvedSnapshot getDockerProgressDetailSnapshot(@Nonnull Long appId) {
        // any of the three snapshots may be null when that phase has not reported yet
        return new DockerResolvedSnapshot(
            DOCKER_PULL_PG_SNAPSHOTS.getIfPresent(appId),
            DOCKER_BUILD_PG_SNAPSHOTS.getIfPresent(appId),
            DOCKER_PUSH_PG_SNAPSHOTS.getIfPresent(appId));
    }
@Override
public boolean allowToBuildNow(@Nonnull Long appId) {
return getCurrentBuildPipeline(appId)
.map(pipeline -> PipelineStatusEnum.running != pipeline.getPipelineStatus())
.orElse(true);
}
@Override
public Map<Long, PipelineStatusEnum> listAppIdPipelineStatusMap(List<Long> appIds) {
if (CollectionUtils.isEmpty(appIds)) {
return new HashMap<>();
}
List<ApplicationBuildPipeline> appBuildPipelines =
this.lambdaQuery().in(ApplicationBuildPipeline::getAppId, appIds).list();
if (CollectionUtils.isEmpty(appBuildPipelines)) {
return new HashMap<>();
}
return appBuildPipelines.stream()
.collect(Collectors.toMap(ApplicationBuildPipeline::getAppId, ApplicationBuildPipeline::getPipelineStatus));
}
    /** Deletes any persisted build-pipeline record belonging to the application. */
    @Override
    public void removeByAppId(Long appId) {
        this.lambdaUpdate().eq(ApplicationBuildPipeline::getAppId, appId).remove();
    }
/**
* save or update build pipeline
*
* @param pipe application build pipeline
* @return value after the save or update
*/
public boolean saveEntity(ApplicationBuildPipeline pipe) {
ApplicationBuildPipeline old = getById(pipe.getAppId());
if (old == null) {
return save(pipe);
}
return updateById(pipe);
}
/**
* Check if the jar exists, and upload a copy if it does not exist
*
* @param fsOperator
* @param localJar
* @param targetJar
* @param targetDir
*/
private void checkOrElseUploadJar(
FsOperator fsOperator, File localJar, String targetJar, String targetDir) {
if (!fsOperator.exists(targetJar)) {
fsOperator.upload(localJar.getAbsolutePath(), targetDir, false, true);
} else {
// The file exists to check whether it is consistent, and if it is inconsistent, re-upload it
if (!FileUtils.equals(localJar, new File(targetJar))) {
fsOperator.upload(localJar.getAbsolutePath(), targetDir, false, true);
}
}
}
/**
* Gets and parses dependencies on the application
*
* @param application
* @return DependencyInfo
*/
private DependencyInfo getMergedDependencyInfo(FlinkApplication application) {
DependencyInfo dependencyInfo = application.getDependencyInfo();
if (StringUtils.isBlank(application.getTeamResource())) {
return dependencyInfo;
}
try {
String[] resourceIds = JacksonUtils.read(application.getTeamResource(), String[].class);
List<Artifact> mvnArtifacts = new ArrayList<Artifact>();
List<String> jarLibs = new ArrayList<String>();
Arrays.stream(resourceIds)
.forEach(
resourceId -> {
Resource resource = resourceService.getById(resourceId);
if (resource.getResourceType() != ResourceTypeEnum.GROUP) {
mergeDependency(application, mvnArtifacts, jarLibs, resource);
} else {
try {
String[] groupElements =
JacksonUtils.read(resource.getResource(),
String[].class);
Arrays.stream(groupElements)
.forEach(
resourceIdInGroup -> mergeDependency(
application,
mvnArtifacts,
jarLibs,
resourceService.getById(
resourceIdInGroup)));
} catch (JsonProcessingException e) {
throw new ApiAlertException("Parse resource group failed.", e);
}
}
});
return dependencyInfo.merge(mvnArtifacts, jarLibs);
} catch (Exception e) {
log.error("Merge team dependency failed.", e);
return dependencyInfo;
}
}
private static void mergeDependency(
FlinkApplication application,
List<Artifact> mvnArtifacts,
List<String> jarLibs,
Resource resource) {
Dependency dependency = Dependency.toDependency(resource.getResource());
dependency
.getPom()
.forEach(
pom -> mvnArtifacts.add(
new Artifact(
pom.getGroupId(),
pom.getArtifactId(),
pom.getVersion(),
pom.getClassifier())));
dependency
.getJar()
.forEach(
jar -> jarLibs.add(
String.format(
"%s/%d/%s",
Workspace.local().APP_UPLOADS(),
application.getTeamId(), jar)));
}
}
|
googleapis/google-cloud-java | 36,458 | java-dataproc/grpc-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/SessionControllerGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dataproc.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* The `SessionController` provides methods to manage interactive sessions.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/dataproc/v1/sessions.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class SessionControllerGrpc {
  // Not instantiable: this class only hosts static RPC descriptors and stub factories.
  private SessionControllerGrpc() {}
  public static final java.lang.String SERVICE_NAME = "google.cloud.dataproc.v1.SessionController";
// Static method descriptors that strictly reflect the proto.
  // Descriptor for the unary CreateSession RPC, initialized lazily with double-checked
  // locking on the volatile field so concurrent callers see one fully built instance.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.CreateSessionRequest, com.google.longrunning.Operation>
      getCreateSessionMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateSession",
      requestType = com.google.cloud.dataproc.v1.CreateSessionRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.CreateSessionRequest, com.google.longrunning.Operation>
      getCreateSessionMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dataproc.v1.CreateSessionRequest, com.google.longrunning.Operation>
        getCreateSessionMethod;
    if ((getCreateSessionMethod = SessionControllerGrpc.getCreateSessionMethod) == null) {
      synchronized (SessionControllerGrpc.class) {
        if ((getCreateSessionMethod = SessionControllerGrpc.getCreateSessionMethod) == null) {
          SessionControllerGrpc.getCreateSessionMethod =
              getCreateSessionMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dataproc.v1.CreateSessionRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateSession"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dataproc.v1.CreateSessionRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SessionControllerMethodDescriptorSupplier("CreateSession"))
                      .build();
        }
      }
    }
    return getCreateSessionMethod;
  }
  // Descriptor for the unary GetSession RPC; same lazy double-checked-locking pattern
  // as the other descriptors in this class.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.GetSessionRequest, com.google.cloud.dataproc.v1.Session>
      getGetSessionMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetSession",
      requestType = com.google.cloud.dataproc.v1.GetSessionRequest.class,
      responseType = com.google.cloud.dataproc.v1.Session.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.GetSessionRequest, com.google.cloud.dataproc.v1.Session>
      getGetSessionMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dataproc.v1.GetSessionRequest, com.google.cloud.dataproc.v1.Session>
        getGetSessionMethod;
    if ((getGetSessionMethod = SessionControllerGrpc.getGetSessionMethod) == null) {
      synchronized (SessionControllerGrpc.class) {
        if ((getGetSessionMethod = SessionControllerGrpc.getGetSessionMethod) == null) {
          SessionControllerGrpc.getGetSessionMethod =
              getGetSessionMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dataproc.v1.GetSessionRequest,
                          com.google.cloud.dataproc.v1.Session>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetSession"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dataproc.v1.GetSessionRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dataproc.v1.Session.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SessionControllerMethodDescriptorSupplier("GetSession"))
                      .build();
        }
      }
    }
    return getGetSessionMethod;
  }
  // Descriptor for the unary ListSessions RPC; same lazy double-checked-locking pattern
  // as the other descriptors in this class.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.ListSessionsRequest,
          com.google.cloud.dataproc.v1.ListSessionsResponse>
      getListSessionsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListSessions",
      requestType = com.google.cloud.dataproc.v1.ListSessionsRequest.class,
      responseType = com.google.cloud.dataproc.v1.ListSessionsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.ListSessionsRequest,
          com.google.cloud.dataproc.v1.ListSessionsResponse>
      getListSessionsMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dataproc.v1.ListSessionsRequest,
            com.google.cloud.dataproc.v1.ListSessionsResponse>
        getListSessionsMethod;
    if ((getListSessionsMethod = SessionControllerGrpc.getListSessionsMethod) == null) {
      synchronized (SessionControllerGrpc.class) {
        if ((getListSessionsMethod = SessionControllerGrpc.getListSessionsMethod) == null) {
          SessionControllerGrpc.getListSessionsMethod =
              getListSessionsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dataproc.v1.ListSessionsRequest,
                          com.google.cloud.dataproc.v1.ListSessionsResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListSessions"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dataproc.v1.ListSessionsRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dataproc.v1.ListSessionsResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SessionControllerMethodDescriptorSupplier("ListSessions"))
                      .build();
        }
      }
    }
    return getListSessionsMethod;
  }
  // Cached descriptor for the unary TerminateSession RPC; volatile for safe publication.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.TerminateSessionRequest, com.google.longrunning.Operation>
      getTerminateSessionMethod;

  // Lazily builds the TerminateSession method descriptor (double-checked locking).
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "TerminateSession",
      requestType = com.google.cloud.dataproc.v1.TerminateSessionRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.TerminateSessionRequest, com.google.longrunning.Operation>
      getTerminateSessionMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dataproc.v1.TerminateSessionRequest, com.google.longrunning.Operation>
        getTerminateSessionMethod;
    if ((getTerminateSessionMethod = SessionControllerGrpc.getTerminateSessionMethod) == null) {
      synchronized (SessionControllerGrpc.class) {
        if ((getTerminateSessionMethod = SessionControllerGrpc.getTerminateSessionMethod) == null) {
          SessionControllerGrpc.getTerminateSessionMethod =
              getTerminateSessionMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dataproc.v1.TerminateSessionRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "TerminateSession"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dataproc.v1.TerminateSessionRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SessionControllerMethodDescriptorSupplier("TerminateSession"))
                      .build();
        }
      }
    }
    return getTerminateSessionMethod;
  }
  // Cached descriptor for the unary DeleteSession RPC; volatile for safe publication.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.DeleteSessionRequest, com.google.longrunning.Operation>
      getDeleteSessionMethod;

  // Lazily builds the DeleteSession method descriptor (double-checked locking).
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteSession",
      requestType = com.google.cloud.dataproc.v1.DeleteSessionRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dataproc.v1.DeleteSessionRequest, com.google.longrunning.Operation>
      getDeleteSessionMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dataproc.v1.DeleteSessionRequest, com.google.longrunning.Operation>
        getDeleteSessionMethod;
    if ((getDeleteSessionMethod = SessionControllerGrpc.getDeleteSessionMethod) == null) {
      synchronized (SessionControllerGrpc.class) {
        if ((getDeleteSessionMethod = SessionControllerGrpc.getDeleteSessionMethod) == null) {
          SessionControllerGrpc.getDeleteSessionMethod =
              getDeleteSessionMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dataproc.v1.DeleteSessionRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteSession"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dataproc.v1.DeleteSessionRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SessionControllerMethodDescriptorSupplier("DeleteSession"))
                      .build();
        }
      }
    }
    return getDeleteSessionMethod;
  }
/** Creates a new async stub that supports all call types for the service */
public static SessionControllerStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<SessionControllerStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<SessionControllerStub>() {
@java.lang.Override
public SessionControllerStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new SessionControllerStub(channel, callOptions);
}
};
return SessionControllerStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static SessionControllerBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<SessionControllerBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<SessionControllerBlockingV2Stub>() {
@java.lang.Override
public SessionControllerBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new SessionControllerBlockingV2Stub(channel, callOptions);
}
};
return SessionControllerBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static SessionControllerBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<SessionControllerBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<SessionControllerBlockingStub>() {
@java.lang.Override
public SessionControllerBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new SessionControllerBlockingStub(channel, callOptions);
}
};
return SessionControllerBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static SessionControllerFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<SessionControllerFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<SessionControllerFutureStub>() {
@java.lang.Override
public SessionControllerFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new SessionControllerFutureStub(channel, callOptions);
}
};
return SessionControllerFutureStub.newStub(factory, channel);
}
  /**
   *
   *
   * <pre>
   * The `SessionController` provides methods to manage interactive sessions.
   * </pre>
   */
  public interface AsyncService {
    // Each default method responds with UNIMPLEMENTED until a server implementation
    // overrides it; implementations are bound via bindService(AsyncService).
    /**
     *
     *
     * <pre>
     * Create an interactive session asynchronously.
     * </pre>
     */
    default void createSession(
        com.google.cloud.dataproc.v1.CreateSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateSessionMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Gets the resource representation for an interactive session.
     * </pre>
     */
    default void getSession(
        com.google.cloud.dataproc.v1.GetSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dataproc.v1.Session> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetSessionMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Lists interactive sessions.
     * </pre>
     */
    default void listSessions(
        com.google.cloud.dataproc.v1.ListSessionsRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dataproc.v1.ListSessionsResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListSessionsMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Terminates the interactive session.
     * </pre>
     */
    default void terminateSession(
        com.google.cloud.dataproc.v1.TerminateSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getTerminateSessionMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the interactive session resource. If the session is not in terminal
     * state, it is terminated, and then deleted.
     * </pre>
     */
    default void deleteSession(
        com.google.cloud.dataproc.v1.DeleteSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteSessionMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service SessionController.
   *
   * <pre>
   * The `SessionController` provides methods to manage interactive sessions.
   * </pre>
   */
  public abstract static class SessionControllerImplBase
      implements io.grpc.BindableService, AsyncService {
    // Delegates to the static bindService helper so the wiring of method handlers lives
    // in one place and is shared with plain AsyncService implementations.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return SessionControllerGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service SessionController.
   *
   * <pre>
   * The `SessionController` provides methods to manage interactive sessions.
   * </pre>
   */
  public static final class SessionControllerStub
      extends io.grpc.stub.AbstractAsyncStub<SessionControllerStub> {
    private SessionControllerStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Required by AbstractStub: produce a new stub instance when call options change.
    @java.lang.Override
    protected SessionControllerStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SessionControllerStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Create an interactive session asynchronously.
     * </pre>
     */
    public void createSession(
        com.google.cloud.dataproc.v1.CreateSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateSessionMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Gets the resource representation for an interactive session.
     * </pre>
     */
    public void getSession(
        com.google.cloud.dataproc.v1.GetSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dataproc.v1.Session> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetSessionMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Lists interactive sessions.
     * </pre>
     */
    public void listSessions(
        com.google.cloud.dataproc.v1.ListSessionsRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dataproc.v1.ListSessionsResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListSessionsMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Terminates the interactive session.
     * </pre>
     */
    public void terminateSession(
        com.google.cloud.dataproc.v1.TerminateSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getTerminateSessionMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the interactive session resource. If the session is not in terminal
     * state, it is terminated, and then deleted.
     * </pre>
     */
    public void deleteSession(
        com.google.cloud.dataproc.v1.DeleteSessionRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteSessionMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service SessionController.
   *
   * <pre>
   * The `SessionController` provides methods to manage interactive sessions.
   * </pre>
   */
  public static final class SessionControllerBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<SessionControllerBlockingV2Stub> {
    private SessionControllerBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Required by AbstractStub: produce a new stub instance when call options change.
    @java.lang.Override
    protected SessionControllerBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SessionControllerBlockingV2Stub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Create an interactive session asynchronously.
     * </pre>
     */
    public com.google.longrunning.Operation createSession(
        com.google.cloud.dataproc.v1.CreateSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateSessionMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Gets the resource representation for an interactive session.
     * </pre>
     */
    public com.google.cloud.dataproc.v1.Session getSession(
        com.google.cloud.dataproc.v1.GetSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetSessionMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Lists interactive sessions.
     * </pre>
     */
    public com.google.cloud.dataproc.v1.ListSessionsResponse listSessions(
        com.google.cloud.dataproc.v1.ListSessionsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListSessionsMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Terminates the interactive session.
     * </pre>
     */
    public com.google.longrunning.Operation terminateSession(
        com.google.cloud.dataproc.v1.TerminateSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getTerminateSessionMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the interactive session resource. If the session is not in terminal
     * state, it is terminated, and then deleted.
     * </pre>
     */
    public com.google.longrunning.Operation deleteSession(
        com.google.cloud.dataproc.v1.DeleteSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteSessionMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service SessionController.
   *
   * <pre>
   * The `SessionController` provides methods to manage interactive sessions.
   * </pre>
   */
  public static final class SessionControllerBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<SessionControllerBlockingStub> {
    private SessionControllerBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Required by AbstractStub: produce a new stub instance when call options change.
    @java.lang.Override
    protected SessionControllerBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SessionControllerBlockingStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Create an interactive session asynchronously.
     * </pre>
     */
    public com.google.longrunning.Operation createSession(
        com.google.cloud.dataproc.v1.CreateSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateSessionMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Gets the resource representation for an interactive session.
     * </pre>
     */
    public com.google.cloud.dataproc.v1.Session getSession(
        com.google.cloud.dataproc.v1.GetSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetSessionMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Lists interactive sessions.
     * </pre>
     */
    public com.google.cloud.dataproc.v1.ListSessionsResponse listSessions(
        com.google.cloud.dataproc.v1.ListSessionsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListSessionsMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Terminates the interactive session.
     * </pre>
     */
    public com.google.longrunning.Operation terminateSession(
        com.google.cloud.dataproc.v1.TerminateSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getTerminateSessionMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the interactive session resource. If the session is not in terminal
     * state, it is terminated, and then deleted.
     * </pre>
     */
    public com.google.longrunning.Operation deleteSession(
        com.google.cloud.dataproc.v1.DeleteSessionRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteSessionMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service SessionController.
   *
   * <pre>
   * The `SessionController` provides methods to manage interactive sessions.
   * </pre>
   */
  public static final class SessionControllerFutureStub
      extends io.grpc.stub.AbstractFutureStub<SessionControllerFutureStub> {
    private SessionControllerFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    // Required by AbstractStub: produce a new stub instance when call options change.
    @java.lang.Override
    protected SessionControllerFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SessionControllerFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Create an interactive session asynchronously.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        createSession(com.google.cloud.dataproc.v1.CreateSessionRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateSessionMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Gets the resource representation for an interactive session.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.dataproc.v1.Session>
        getSession(com.google.cloud.dataproc.v1.GetSessionRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetSessionMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Lists interactive sessions.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dataproc.v1.ListSessionsResponse>
        listSessions(com.google.cloud.dataproc.v1.ListSessionsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListSessionsMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Terminates the interactive session.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        terminateSession(com.google.cloud.dataproc.v1.TerminateSessionRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getTerminateSessionMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the interactive session resource. If the session is not in terminal
     * state, it is terminated, and then deleted.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        deleteSession(com.google.cloud.dataproc.v1.DeleteSessionRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteSessionMethod(), getCallOptions()), request);
    }
  }
  // Numeric ids used by MethodHandlers to dispatch incoming server calls.
  private static final int METHODID_CREATE_SESSION = 0;
  private static final int METHODID_GET_SESSION = 1;
  private static final int METHODID_LIST_SESSIONS = 2;
  private static final int METHODID_TERMINATE_SESSION = 3;
  private static final int METHODID_DELETE_SESSION = 4;

  // Single handler class shared by all methods of the service; the methodId selects which
  // AsyncService method to invoke. All methods here are unary, so only the unary invoke()
  // has real cases; the streaming invoke() is never reached.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_CREATE_SESSION:
          serviceImpl.createSession(
              (com.google.cloud.dataproc.v1.CreateSessionRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_GET_SESSION:
          serviceImpl.getSession(
              (com.google.cloud.dataproc.v1.GetSessionRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dataproc.v1.Session>) responseObserver);
          break;
        case METHODID_LIST_SESSIONS:
          serviceImpl.listSessions(
              (com.google.cloud.dataproc.v1.ListSessionsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dataproc.v1.ListSessionsResponse>)
                  responseObserver);
          break;
        case METHODID_TERMINATE_SESSION:
          serviceImpl.terminateSession(
              (com.google.cloud.dataproc.v1.TerminateSessionRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_DELETE_SESSION:
          serviceImpl.deleteSession(
              (com.google.cloud.dataproc.v1.DeleteSessionRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          // Unknown methodId indicates a wiring bug in bindService, not a client error.
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          // No streaming methods exist on this service.
          throw new AssertionError();
      }
    }
  }
  // Wires every RPC of the service to a MethodHandlers instance keyed by METHODID_* so a
  // single AsyncService implementation serves all five unary methods.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getCreateSessionMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dataproc.v1.CreateSessionRequest,
                    com.google.longrunning.Operation>(service, METHODID_CREATE_SESSION)))
        .addMethod(
            getGetSessionMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dataproc.v1.GetSessionRequest,
                    com.google.cloud.dataproc.v1.Session>(service, METHODID_GET_SESSION)))
        .addMethod(
            getListSessionsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dataproc.v1.ListSessionsRequest,
                    com.google.cloud.dataproc.v1.ListSessionsResponse>(
                    service, METHODID_LIST_SESSIONS)))
        .addMethod(
            getTerminateSessionMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dataproc.v1.TerminateSessionRequest,
                    com.google.longrunning.Operation>(service, METHODID_TERMINATE_SESSION)))
        .addMethod(
            getDeleteSessionMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dataproc.v1.DeleteSessionRequest,
                    com.google.longrunning.Operation>(service, METHODID_DELETE_SESSION)))
        .build();
  }
  // Schema descriptor suppliers expose the protobuf file/service/method descriptors to
  // gRPC tooling (e.g. server reflection).
  private abstract static class SessionControllerBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    SessionControllerBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.dataproc.v1.SessionsProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("SessionController");
    }
  }

  // File-level supplier: inherits everything from the base.
  private static final class SessionControllerFileDescriptorSupplier
      extends SessionControllerBaseDescriptorSupplier {
    SessionControllerFileDescriptorSupplier() {}
  }

  // Method-level supplier: additionally resolves a single method by name.
  private static final class SessionControllerMethodDescriptorSupplier
      extends SessionControllerBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    SessionControllerMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Cached service descriptor; volatile for safe publication, built lazily below.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Lazily assembles the ServiceDescriptor from the five method descriptors using the same
  // double-checked-locking pattern as the per-method accessors.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (SessionControllerGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new SessionControllerFileDescriptorSupplier())
                      .addMethod(getCreateSessionMethod())
                      .addMethod(getGetSessionMethod())
                      .addMethod(getListSessionsMethod())
                      .addMethod(getTerminateSessionMethod())
                      .addMethod(getDeleteSessionMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
apache/james-project | 36,008 | mdn/src/test/java/org/apache/james/mdn/MDNReportFormattingTest.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.mdn;
import static org.assertj.core.api.Assertions.assertThat;
import org.apache.james.mdn.action.mode.DispositionActionMode;
import org.apache.james.mdn.fields.AddressType;
import org.apache.james.mdn.fields.Disposition;
import org.apache.james.mdn.fields.Error;
import org.apache.james.mdn.fields.ExtensionField;
import org.apache.james.mdn.fields.FinalRecipient;
import org.apache.james.mdn.fields.Gateway;
import org.apache.james.mdn.fields.OriginalMessageId;
import org.apache.james.mdn.fields.OriginalRecipient;
import org.apache.james.mdn.fields.ReportingUserAgent;
import org.apache.james.mdn.fields.Text;
import org.apache.james.mdn.modifier.DispositionModifier;
import org.apache.james.mdn.sending.mode.DispositionSendingMode;
import org.apache.james.mdn.type.DispositionType;
import org.junit.jupiter.api.Test;
class MDNReportFormattingTest {
@Test
void generateMDNReportShouldFormatAutomaticActions() {
Disposition disposition = Disposition.builder()
.actionMode(DispositionActionMode.Automatic)
.sendingMode(DispositionSendingMode.Automatic)
.type(DispositionType.Processed)
.addModifier(DispositionModifier.Error)
.addModifier(DispositionModifier.Failed)
.build();
String report = MDNReport.builder()
.reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
.finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
.originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
.originalMessageIdField(new OriginalMessageId("original_message_id"))
.dispositionField(disposition)
.build()
.formattedValue();
assertThat(report)
.isEqualTo("Reporting-UA: UA_name; UA_product\r\n" +
"Original-Recipient: rfc822; originalRecipient\r\n" +
"Final-Recipient: rfc822; final_recipient\r\n" +
"Original-Message-ID: original_message_id\r\n" +
"Disposition: automatic-action/MDN-sent-automatically;processed/error,failed\r\n");
}
@Test
void generateMDNReportShouldFormatManualActions() {
Disposition disposition = Disposition.builder()
.actionMode(DispositionActionMode.Manual)
.sendingMode(DispositionSendingMode.Manual)
.type(DispositionType.Processed)
.addModifier(DispositionModifier.Error)
.addModifier(DispositionModifier.Failed)
.build();
String report = MDNReport.builder()
.reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
.finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
.originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
.originalMessageIdField(new OriginalMessageId("original_message_id"))
.dispositionField(disposition)
.build()
.formattedValue();
assertThat(report)
.isEqualTo("Reporting-UA: UA_name; UA_product\r\n" +
"Original-Recipient: rfc822; originalRecipient\r\n" +
"Final-Recipient: rfc822; final_recipient\r\n" +
"Original-Message-ID: original_message_id\r\n" +
"Disposition: manual-action/MDN-sent-manually;processed/error,failed\r\n");
}
@Test
void generateMDNReportShouldFormatTypeDispatcher() {
Disposition disposition = Disposition.builder()
.actionMode(DispositionActionMode.Manual)
.sendingMode(DispositionSendingMode.Manual)
.type(DispositionType.Dispatched)
.addModifier(DispositionModifier.Error)
.addModifier(DispositionModifier.Failed)
.build();
String report = MDNReport.builder()
.reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
.finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
.originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
.originalMessageIdField(new OriginalMessageId("original_message_id"))
.dispositionField(disposition)
.build()
.formattedValue();
assertThat(report)
.isEqualTo("Reporting-UA: UA_name; UA_product\r\n" +
"Original-Recipient: rfc822; originalRecipient\r\n" +
"Final-Recipient: rfc822; final_recipient\r\n" +
"Original-Message-ID: original_message_id\r\n" +
"Disposition: manual-action/MDN-sent-manually;dispatched/error,failed\r\n");
}
@Test
void generateMDNReportShouldFormatTypeDisplayed() {
Disposition disposition = Disposition.builder()
.actionMode(DispositionActionMode.Manual)
.sendingMode(DispositionSendingMode.Manual)
.type(DispositionType.Displayed)
.addModifier(DispositionModifier.Error)
.addModifier(DispositionModifier.Failed)
.build();
String report = MDNReport.builder()
.reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
.finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
.originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
.originalMessageIdField(new OriginalMessageId("original_message_id"))
.dispositionField(disposition)
.build()
.formattedValue();
assertThat(report)
.isEqualTo("Reporting-UA: UA_name; UA_product\r\n" +
"Original-Recipient: rfc822; originalRecipient\r\n" +
"Final-Recipient: rfc822; final_recipient\r\n" +
"Original-Message-ID: original_message_id\r\n" +
"Disposition: manual-action/MDN-sent-manually;displayed/error,failed\r\n");
}
@Test
void generateMDNReportShouldFormatTypeDeleted() {
Disposition disposition = Disposition.builder()
.actionMode(DispositionActionMode.Manual)
.sendingMode(DispositionSendingMode.Manual)
.type(DispositionType.Deleted)
.addModifier(DispositionModifier.Error)
.addModifier(DispositionModifier.Failed)
.build();
String report = MDNReport.builder()
.reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
.finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
.originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
.originalMessageIdField(new OriginalMessageId("original_message_id"))
.dispositionField(disposition)
.build()
.formattedValue();
assertThat(report)
.isEqualTo("Reporting-UA: UA_name; UA_product\r\n" +
"Original-Recipient: rfc822; originalRecipient\r\n" +
"Final-Recipient: rfc822; final_recipient\r\n" +
"Original-Message-ID: original_message_id\r\n" +
"Disposition: manual-action/MDN-sent-manually;deleted/error,failed\r\n");
}
@Test
void generateMDNReportShouldFormatAllModifier() {
    // All six standard disposition modifiers should be emitted as a comma-separated list.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Manual)
            .sendingMode(DispositionSendingMode.Manual)
            .type(DispositionType.Deleted)
            .addModifiers(DispositionModifier.Error, DispositionModifier.Expired, DispositionModifier.Failed,
                DispositionModifier.MailboxTerminated, DispositionModifier.Superseded, DispositionModifier.Warning)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: manual-action/MDN-sent-manually;deleted/error,expired,failed,mailbox-terminated,superseded,warning") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatOneModifier() {
    // A single modifier is appended to the disposition type after a slash.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Manual)
            .sendingMode(DispositionSendingMode.Manual)
            .type(DispositionType.Deleted)
            .addModifier(DispositionModifier.Error)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: manual-action/MDN-sent-manually;deleted/error") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatUnknownModifier() {
    // A non-standard (custom) modifier is serialized verbatim.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Manual)
            .sendingMode(DispositionSendingMode.Manual)
            .type(DispositionType.Deleted)
            .addModifier(new DispositionModifier("new"))
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: manual-action/MDN-sent-manually;deleted/new") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatNoModifier() {
    // With no modifiers, the disposition line ends at the type (no trailing slash).
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Manual)
            .sendingMode(DispositionSendingMode.Manual)
            .type(DispositionType.Deleted)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: manual-action/MDN-sent-manually;deleted") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatNullUserAgentProduct() {
    // When the UA product is absent, Reporting-UA contains only the UA name.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Manual)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Deleted)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: manual-action/MDN-sent-automatically;deleted/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatNullOriginalRecipient() {
    // The Original-Recipient header is omitted entirely when the field is not set.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Manual)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Deleted)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: manual-action/MDN-sent-automatically;deleted/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatWhenMissingOriginalMessageId() {
    // The Original-Message-ID header is omitted entirely when the field is not set.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Manual)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Deleted)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Disposition: manual-action/MDN-sent-automatically;deleted/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatGateway() {
    // A gateway with no explicit name type defaults to "dns" in the MDN-Gateway header.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .gatewayField(Gateway.builder().name(Text.fromRawText("host.com")).build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "MDN-Gateway: dns;host.com",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatGatewayWithExoticNameType() {
    // A custom gateway name type replaces the default "dns" marker.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .gatewayField(Gateway.builder().nameType(new AddressType("postal")).name(Text.fromRawText("5 rue Charles mercier")).build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "MDN-Gateway: postal;5 rue Charles mercier",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatExoticAddressTypeForOriginalRecipient() {
    // A custom address type on the original recipient replaces the default "rfc822" marker.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().addressType(new AddressType("roomNumber")).originalRecipient(Text.fromRawText("385")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: roomNumber; 385",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatMultilineAddresses() {
    // Multi-line address values are folded: LFs in the raw text become CRLF
    // continuation lines in the formatted report.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .gatewayField(Gateway.builder().nameType(new AddressType("postal")).name(Text.fromRawText("8 rue Charles mercier\n 36555 Saint Coincoin\n France")).build())
        .finalRecipientField(FinalRecipient.builder().addressType(new AddressType("postal")).finalRecipient(Text.fromRawText("5 rue Mercier\n 36555 Saint Coincoin\n France")).build())
        .originalRecipientField(OriginalRecipient.builder().addressType(new AddressType("postal")).originalRecipient(Text.fromRawText("3 rue Mercier\n 36555 Saint Coincoin\n France")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    // Continuation lines keep their single leading space.
    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "MDN-Gateway: postal;8 rue Charles mercier",
        " 36555 Saint Coincoin",
        " France",
        "Original-Recipient: postal; 3 rue Mercier",
        " 36555 Saint Coincoin",
        " France",
        "Final-Recipient: postal; 5 rue Mercier",
        " 36555 Saint Coincoin",
        " France",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatUnknownAddressTypeForOriginalRecipient() {
    // ofUnknown(...) serializes the original recipient with the "unknown" address type.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.ofUnknown(Text.fromRawText("#$%*")))
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: unknown; #$%*",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatExoticFinalRecipientAddressType() {
    // A custom address type on the final recipient replaces the default "rfc822" marker.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().addressType(new AddressType("roomNumber")).finalRecipient(Text.fromRawText("781")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: roomNumber; 781",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatErrorField() {
    // A single Error field is appended after the Disposition header.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .addErrorField(new Error(Text.fromRawText("An error message")))
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed",
        "Error: An error message") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatErrorFields() {
    // Multiple Error fields are emitted in insertion order, one header per error.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .addErrorFields(
            new Error(Text.fromRawText("An error message")),
            new Error(Text.fromRawText("A second error message")))
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed",
        "Error: An error message",
        "Error: A second error message") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatErrorFieldsOnSeveralLines() {
    // An LF inside the error text is folded into a CRLF + space continuation line.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .addErrorField(new Error(Text.fromRawText("An error message\non several lines")))
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed",
        "Error: An error message",
        " on several lines") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatOneExtension() {
    // A single extension field is appended verbatim after the standard headers.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .withExtensionField(ExtensionField.builder().fieldName("X-OPENPAAS-IP").rawValue("177.177.177.77").build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed",
        "X-OPENPAAS-IP: 177.177.177.77") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
@Test
void generateMDNReportShouldFormatManyExtensions() {
    // Multiple extension fields are emitted in insertion order after the standard headers.
    String actualReport = MDNReport.builder()
        .reportingUserAgentField(ReportingUserAgent.builder().userAgentName("UA_name").userAgentProduct("UA_product").build())
        .finalRecipientField(FinalRecipient.builder().finalRecipient(Text.fromRawText("final_recipient")).build())
        .originalRecipientField(OriginalRecipient.builder().originalRecipient(Text.fromRawText("originalRecipient")).build())
        .originalMessageIdField(new OriginalMessageId("original_message_id"))
        .dispositionField(Disposition.builder()
            .actionMode(DispositionActionMode.Automatic)
            .sendingMode(DispositionSendingMode.Automatic)
            .type(DispositionType.Processed)
            .addModifier(DispositionModifier.Error)
            .addModifier(DispositionModifier.Failed)
            .build())
        .withExtensionFields(
            ExtensionField.builder().fieldName("X-OPENPAAS-IP").rawValue("177.177.177.77").build(),
            ExtensionField.builder().fieldName("X-OPENPAAS-PORT").rawValue("8000").build())
        .build()
        .formattedValue();

    String expectedReport = String.join("\r\n",
        "Reporting-UA: UA_name; UA_product",
        "Original-Recipient: rfc822; originalRecipient",
        "Final-Recipient: rfc822; final_recipient",
        "Original-Message-ID: original_message_id",
        "Disposition: automatic-action/MDN-sent-automatically;processed/error,failed",
        "X-OPENPAAS-IP: 177.177.177.77",
        "X-OPENPAAS-PORT: 8000") + "\r\n";

    assertThat(actualReport).isEqualTo(expectedReport);
}
}
|
googleapis/google-cloud-java | 36,542 | java-resourcemanager/google-cloud-resourcemanager/src/main/java/com/google/cloud/resourcemanager/v3/stub/TagValuesStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.resourcemanager.v3.stub;
import static com.google.cloud.resourcemanager.v3.TagValuesClient.ListTagValuesPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.resourcemanager.v3.CreateTagValueMetadata;
import com.google.cloud.resourcemanager.v3.CreateTagValueRequest;
import com.google.cloud.resourcemanager.v3.DeleteTagValueMetadata;
import com.google.cloud.resourcemanager.v3.DeleteTagValueRequest;
import com.google.cloud.resourcemanager.v3.GetNamespacedTagValueRequest;
import com.google.cloud.resourcemanager.v3.GetTagValueRequest;
import com.google.cloud.resourcemanager.v3.ListTagValuesRequest;
import com.google.cloud.resourcemanager.v3.ListTagValuesResponse;
import com.google.cloud.resourcemanager.v3.TagValue;
import com.google.cloud.resourcemanager.v3.UpdateTagValueMetadata;
import com.google.cloud.resourcemanager.v3.UpdateTagValueRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link TagValuesStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (cloudresourcemanager.googleapis.com) and default port (443)
* are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getTagValue:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* TagValuesStubSettings.Builder tagValuesSettingsBuilder = TagValuesStubSettings.newBuilder();
* tagValuesSettingsBuilder
* .getTagValueSettings()
* .setRetrySettings(
* tagValuesSettingsBuilder
* .getTagValueSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* TagValuesStubSettings tagValuesSettings = tagValuesSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for createTagValue:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* TagValuesStubSettings.Builder tagValuesSettingsBuilder = TagValuesStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
* OperationalTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* tagValuesSettingsBuilder
* .createClusterOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class TagValuesStubSettings extends StubSettings<TagValuesStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder()
.add("https://www.googleapis.com/auth/cloud-platform")
.add("https://www.googleapis.com/auth/cloud-platform.read-only")
.build();
private final PagedCallSettings<
ListTagValuesRequest, ListTagValuesResponse, ListTagValuesPagedResponse>
listTagValuesSettings;
private final UnaryCallSettings<GetTagValueRequest, TagValue> getTagValueSettings;
private final UnaryCallSettings<GetNamespacedTagValueRequest, TagValue>
getNamespacedTagValueSettings;
private final UnaryCallSettings<CreateTagValueRequest, Operation> createTagValueSettings;
private final OperationCallSettings<CreateTagValueRequest, TagValue, CreateTagValueMetadata>
createTagValueOperationSettings;
private final UnaryCallSettings<UpdateTagValueRequest, Operation> updateTagValueSettings;
private final OperationCallSettings<UpdateTagValueRequest, TagValue, UpdateTagValueMetadata>
updateTagValueOperationSettings;
private final UnaryCallSettings<DeleteTagValueRequest, Operation> deleteTagValueSettings;
private final OperationCallSettings<DeleteTagValueRequest, TagValue, DeleteTagValueMetadata>
deleteTagValueOperationSettings;
private final UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings;
private final UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings;
private final UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsSettings;
// Auto-generated pagination descriptor: teaches the gax paging machinery how to
// read and write the paging-related fields of ListTagValues requests/responses.
private static final PagedListDescriptor<ListTagValuesRequest, ListTagValuesResponse, TagValue>
    LIST_TAG_VALUES_PAGE_STR_DESC =
        new PagedListDescriptor<ListTagValuesRequest, ListTagValuesResponse, TagValue>() {
          @Override
          public String emptyToken() {
            // The empty string is the protobuf convention for "no further pages".
            return "";
          }

          @Override
          public ListTagValuesRequest injectToken(ListTagValuesRequest payload, String token) {
            // Copies the request via its builder and stamps in the next page token.
            return ListTagValuesRequest.newBuilder(payload).setPageToken(token).build();
          }

          @Override
          public ListTagValuesRequest injectPageSize(ListTagValuesRequest payload, int pageSize) {
            // Copies the request via its builder and stamps in the requested page size.
            return ListTagValuesRequest.newBuilder(payload).setPageSize(pageSize).build();
          }

          @Override
          public Integer extractPageSize(ListTagValuesRequest payload) {
            return payload.getPageSize();
          }

          @Override
          public String extractNextToken(ListTagValuesResponse payload) {
            return payload.getNextPageToken();
          }

          @Override
          public Iterable<TagValue> extractResources(ListTagValuesResponse payload) {
            // The TagValue elements of one page, in response order.
            return payload.getTagValuesList();
          }
        };
// Auto-generated factory that wraps a raw ListTagValues RPC future into the
// ListTagValuesPagedResponse callers iterate page by page, using the descriptor above.
private static final PagedListResponseFactory<
        ListTagValuesRequest, ListTagValuesResponse, ListTagValuesPagedResponse>
    LIST_TAG_VALUES_PAGE_STR_FACT =
        new PagedListResponseFactory<
            ListTagValuesRequest, ListTagValuesResponse, ListTagValuesPagedResponse>() {
          @Override
          public ApiFuture<ListTagValuesPagedResponse> getFuturePagedResponse(
              UnaryCallable<ListTagValuesRequest, ListTagValuesResponse> callable,
              ListTagValuesRequest request,
              ApiCallContext context,
              ApiFuture<ListTagValuesResponse> futureResponse) {
            // Bundle the callable, paging descriptor, request and call context so the
            // paged response can transparently fetch subsequent pages on demand.
            PageContext<ListTagValuesRequest, ListTagValuesResponse, TagValue> pageContext =
                PageContext.create(callable, LIST_TAG_VALUES_PAGE_STR_DESC, request, context);
            return ListTagValuesPagedResponse.createAsync(pageContext, futureResponse);
          }
        };
/**
 * Returns the object with the settings used for calls to listTagValues.
 *
 * <p>The stored settings instance is returned directly (not a defensive copy).
 */
public PagedCallSettings<ListTagValuesRequest, ListTagValuesResponse, ListTagValuesPagedResponse>
    listTagValuesSettings() {
  return listTagValuesSettings;
}
/** Returns the object with the settings used for calls to getTagValue. */
public UnaryCallSettings<GetTagValueRequest, TagValue> getTagValueSettings() {
return getTagValueSettings;
}
/** Returns the object with the settings used for calls to getNamespacedTagValue. */
public UnaryCallSettings<GetNamespacedTagValueRequest, TagValue> getNamespacedTagValueSettings() {
return getNamespacedTagValueSettings;
}
/** Returns the object with the settings used for calls to createTagValue. */
public UnaryCallSettings<CreateTagValueRequest, Operation> createTagValueSettings() {
return createTagValueSettings;
}
/** Returns the object with the settings used for calls to createTagValue. */
public OperationCallSettings<CreateTagValueRequest, TagValue, CreateTagValueMetadata>
createTagValueOperationSettings() {
return createTagValueOperationSettings;
}
/** Returns the object with the settings used for calls to updateTagValue. */
public UnaryCallSettings<UpdateTagValueRequest, Operation> updateTagValueSettings() {
return updateTagValueSettings;
}
/** Returns the object with the settings used for calls to updateTagValue. */
public OperationCallSettings<UpdateTagValueRequest, TagValue, UpdateTagValueMetadata>
updateTagValueOperationSettings() {
return updateTagValueOperationSettings;
}
/** Returns the object with the settings used for calls to deleteTagValue. */
public UnaryCallSettings<DeleteTagValueRequest, Operation> deleteTagValueSettings() {
return deleteTagValueSettings;
}
/** Returns the object with the settings used for calls to deleteTagValue. */
public OperationCallSettings<DeleteTagValueRequest, TagValue, DeleteTagValueMetadata>
deleteTagValueOperationSettings() {
return deleteTagValueOperationSettings;
}
/** Returns the object with the settings used for calls to getIamPolicy. */
public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() {
return getIamPolicySettings;
}
/** Returns the object with the settings used for calls to setIamPolicy. */
public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() {
return setIamPolicySettings;
}
/** Returns the object with the settings used for calls to testIamPermissions. */
public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
testIamPermissionsSettings() {
return testIamPermissionsSettings;
}
/**
 * Creates a transport-specific stub for these settings.
 *
 * <p>Dispatches on the configured transport channel's name: gRPC and HTTP/JSON are the only
 * supported transports.
 *
 * @throws IOException if stub creation fails
 * @throws UnsupportedOperationException if the configured transport is neither gRPC nor HTTP/JSON
 */
public TagValuesStub createStub() throws IOException {
  String transportName = getTransportChannelProvider().getTransportName();
  if (GrpcTransportChannel.getGrpcTransportName().equals(transportName)) {
    return GrpcTagValuesStub.create(this);
  }
  if (HttpJsonTransportChannel.getHttpJsonTransportName().equals(transportName)) {
    return HttpJsonTagValuesStub.create(this);
  }
  throw new UnsupportedOperationException(
      String.format("Transport not supported: %s", transportName));
}
/** Returns the default service name. */
@Override
public String getServiceName() {
  return "cloudresourcemanager";
}

/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
  return InstantiatingExecutorProvider.newBuilder();
}

/** Returns the default service endpoint. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
  return "cloudresourcemanager.googleapis.com:443";
}

/** Returns the default mTLS service endpoint. */
public static String getDefaultMtlsEndpoint() {
  return "cloudresourcemanager.mtls.googleapis.com:443";
}

/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
  return DEFAULT_SERVICE_SCOPES;
}

/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
  return GoogleCredentialsProvider.newBuilder()
      .setScopesToApply(DEFAULT_SERVICE_SCOPES)
      .setUseJwtAccessWithScope(true);
}

/** Returns a builder for the default gRPC ChannelProvider for this service. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
  return InstantiatingGrpcChannelProvider.newBuilder()
      .setMaxInboundMessageSize(Integer.MAX_VALUE);
}

/** Returns a builder for the default REST ChannelProvider for this service. */
@BetaApi
public static InstantiatingHttpJsonChannelProvider.Builder
    defaultHttpJsonTransportProviderBuilder() {
  return InstantiatingHttpJsonChannelProvider.newBuilder();
}

/** Returns the default TransportChannelProvider for this service (gRPC). */
public static TransportChannelProvider defaultTransportChannelProvider() {
  return defaultGrpcTransportProviderBuilder().build();
}

/** Returns a builder for the default gRPC API client header provider. */
public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
  return ApiClientHeaderProvider.newBuilder()
      .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(TagValuesStubSettings.class))
      .setTransportToken(
          GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
}

/** Returns a builder for the default HTTP/JSON API client header provider. */
public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
  return ApiClientHeaderProvider.newBuilder()
      .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(TagValuesStubSettings.class))
      .setTransportToken(
          GaxHttpJsonProperties.getHttpJsonTokenName(),
          GaxHttpJsonProperties.getHttpJsonVersion());
}

/** Returns a builder for the default API client header provider (delegates to the gRPC one). */
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
  return TagValuesStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
}

/** Returns a new gRPC builder for this class. */
public static Builder newBuilder() {
  return Builder.createDefault();
}

/** Returns a new REST builder for this class. */
public static Builder newHttpJsonBuilder() {
  return Builder.createHttpJsonDefault();
}

/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
  return new Builder(clientContext);
}

/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
  return new Builder(this);
}
/**
 * Constructs immutable settings from a builder, freezing each per-method settings builder
 * into its immutable counterpart.
 *
 * @throws IOException if the underlying StubSettings construction fails
 */
protected TagValuesStubSettings(Builder settingsBuilder) throws IOException {
  super(settingsBuilder);

  listTagValuesSettings = settingsBuilder.listTagValuesSettings().build();
  getTagValueSettings = settingsBuilder.getTagValueSettings().build();
  getNamespacedTagValueSettings = settingsBuilder.getNamespacedTagValueSettings().build();
  createTagValueSettings = settingsBuilder.createTagValueSettings().build();
  createTagValueOperationSettings = settingsBuilder.createTagValueOperationSettings().build();
  updateTagValueSettings = settingsBuilder.updateTagValueSettings().build();
  updateTagValueOperationSettings = settingsBuilder.updateTagValueOperationSettings().build();
  deleteTagValueSettings = settingsBuilder.deleteTagValueSettings().build();
  deleteTagValueOperationSettings = settingsBuilder.deleteTagValueOperationSettings().build();
  getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
  setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
  testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
}
/** Builder for TagValuesStubSettings. */
public static class Builder extends StubSettings.Builder<TagValuesStubSettings, Builder> {
  // Holds every unary/paged settings builder so applyToAllUnaryMethods can update them in bulk.
  private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
  private final PagedCallSettings.Builder<
          ListTagValuesRequest, ListTagValuesResponse, ListTagValuesPagedResponse>
      listTagValuesSettings;
  private final UnaryCallSettings.Builder<GetTagValueRequest, TagValue> getTagValueSettings;
  private final UnaryCallSettings.Builder<GetNamespacedTagValueRequest, TagValue>
      getNamespacedTagValueSettings;
  private final UnaryCallSettings.Builder<CreateTagValueRequest, Operation>
      createTagValueSettings;
  private final OperationCallSettings.Builder<
          CreateTagValueRequest, TagValue, CreateTagValueMetadata>
      createTagValueOperationSettings;
  private final UnaryCallSettings.Builder<UpdateTagValueRequest, Operation>
      updateTagValueSettings;
  private final OperationCallSettings.Builder<
          UpdateTagValueRequest, TagValue, UpdateTagValueMetadata>
      updateTagValueOperationSettings;
  private final UnaryCallSettings.Builder<DeleteTagValueRequest, Operation>
      deleteTagValueSettings;
  private final OperationCallSettings.Builder<
          DeleteTagValueRequest, TagValue, DeleteTagValueMetadata>
      deleteTagValueOperationSettings;
  private final UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings;
  private final UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings;
  private final UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings;

  // Named sets of status codes that each retry policy treats as retryable.
  private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
      RETRYABLE_CODE_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
        ImmutableMap.builder();
    definitions.put(
        "retry_policy_0_codes",
        ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
    definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
    definitions.put(
        "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
    RETRYABLE_CODE_DEFINITIONS = definitions.build();
  }

  // Named retry parameter bundles (backoff, timeouts) referenced by initDefaults.
  private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
    RetrySettings settings = null;
    settings =
        RetrySettings.newBuilder()
            .setInitialRetryDelayDuration(Duration.ofMillis(100L))
            .setRetryDelayMultiplier(1.3)
            .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
            .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setTotalTimeoutDuration(Duration.ofMillis(60000L))
            .build();
    definitions.put("retry_policy_0_params", settings);
    settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
    definitions.put("no_retry_params", settings);
    settings =
        RetrySettings.newBuilder()
            .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setTotalTimeoutDuration(Duration.ofMillis(60000L))
            .build();
    definitions.put("no_retry_1_params", settings);
    RETRY_PARAM_DEFINITIONS = definitions.build();
  }

  protected Builder() {
    this(((ClientContext) null));
  }

  protected Builder(ClientContext clientContext) {
    super(clientContext);

    listTagValuesSettings = PagedCallSettings.newBuilder(LIST_TAG_VALUES_PAGE_STR_FACT);
    getTagValueSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    getNamespacedTagValueSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    createTagValueSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    createTagValueOperationSettings = OperationCallSettings.newBuilder();
    updateTagValueSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    updateTagValueOperationSettings = OperationCallSettings.newBuilder();
    deleteTagValueSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    deleteTagValueOperationSettings = OperationCallSettings.newBuilder();
    getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

    // Operation (LRO) settings builders are intentionally excluded; only unary/paged
    // builders participate in applyToAllUnaryMethods.
    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            listTagValuesSettings,
            getTagValueSettings,
            getNamespacedTagValueSettings,
            createTagValueSettings,
            updateTagValueSettings,
            deleteTagValueSettings,
            getIamPolicySettings,
            setIamPolicySettings,
            testIamPermissionsSettings);
    initDefaults(this);
  }

  // Copy constructor used by TagValuesStubSettings.toBuilder().
  protected Builder(TagValuesStubSettings settings) {
    super(settings);

    listTagValuesSettings = settings.listTagValuesSettings.toBuilder();
    getTagValueSettings = settings.getTagValueSettings.toBuilder();
    getNamespacedTagValueSettings = settings.getNamespacedTagValueSettings.toBuilder();
    createTagValueSettings = settings.createTagValueSettings.toBuilder();
    createTagValueOperationSettings = settings.createTagValueOperationSettings.toBuilder();
    updateTagValueSettings = settings.updateTagValueSettings.toBuilder();
    updateTagValueOperationSettings = settings.updateTagValueOperationSettings.toBuilder();
    deleteTagValueSettings = settings.deleteTagValueSettings.toBuilder();
    deleteTagValueOperationSettings = settings.deleteTagValueOperationSettings.toBuilder();
    getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
    setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
    testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();

    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            listTagValuesSettings,
            getTagValueSettings,
            getNamespacedTagValueSettings,
            createTagValueSettings,
            updateTagValueSettings,
            deleteTagValueSettings,
            getIamPolicySettings,
            setIamPolicySettings,
            testIamPermissionsSettings);
  }

  // Creates a builder preconfigured for the gRPC transport.
  private static Builder createDefault() {
    Builder builder = new Builder(((ClientContext) null));

    builder.setTransportChannelProvider(defaultTransportChannelProvider());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);

    return initDefaults(builder);
  }

  // Creates a builder preconfigured for the HTTP/JSON (REST) transport.
  private static Builder createHttpJsonDefault() {
    Builder builder = new Builder(((ClientContext) null));

    builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);

    return initDefaults(builder);
  }

  // Applies the service-config defaults: retryable codes and retry params per method, plus
  // long-running-operation polling/transformer configuration for the three LRO methods.
  private static Builder initDefaults(Builder builder) {
    builder
        .listTagValuesSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .getTagValueSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .getNamespacedTagValueSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

    builder
        .createTagValueSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

    builder
        .updateTagValueSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

    builder
        .deleteTagValueSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

    builder
        .getIamPolicySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .setIamPolicySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

    builder
        .testIamPermissionsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

    builder
        .createTagValueOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<CreateTagValueRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(TagValue.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(CreateTagValueMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                    .setInitialRpcTimeoutDuration(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeoutDuration(Duration.ZERO)
                    .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                    .build()));

    builder
        .updateTagValueOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<UpdateTagValueRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(TagValue.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(UpdateTagValueMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                    .setInitialRpcTimeoutDuration(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeoutDuration(Duration.ZERO)
                    .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                    .build()));

    builder
        .deleteTagValueOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<DeleteTagValueRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(TagValue.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(DeleteTagValueMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                    .setInitialRpcTimeoutDuration(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeoutDuration(Duration.ZERO)
                    .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                    .build()));

    return builder;
  }

  /**
   * Applies the given settings updater function to all of the unary API methods in this service.
   *
   * <p>Note: This method does not support applying settings to streaming methods.
   */
  public Builder applyToAllUnaryMethods(
      ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
    super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
    return this;
  }

  /** Returns the list of unary/paged settings builders managed by this Builder. */
  public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
    return unaryMethodSettingsBuilders;
  }

  /** Returns the builder for the settings used for calls to listTagValues. */
  public PagedCallSettings.Builder<
          ListTagValuesRequest, ListTagValuesResponse, ListTagValuesPagedResponse>
      listTagValuesSettings() {
    return listTagValuesSettings;
  }

  /** Returns the builder for the settings used for calls to getTagValue. */
  public UnaryCallSettings.Builder<GetTagValueRequest, TagValue> getTagValueSettings() {
    return getTagValueSettings;
  }

  /** Returns the builder for the settings used for calls to getNamespacedTagValue. */
  public UnaryCallSettings.Builder<GetNamespacedTagValueRequest, TagValue>
      getNamespacedTagValueSettings() {
    return getNamespacedTagValueSettings;
  }

  /** Returns the builder for the settings used for calls to createTagValue. */
  public UnaryCallSettings.Builder<CreateTagValueRequest, Operation> createTagValueSettings() {
    return createTagValueSettings;
  }

  /** Returns the builder for the settings used for long-running calls to createTagValue. */
  public OperationCallSettings.Builder<CreateTagValueRequest, TagValue, CreateTagValueMetadata>
      createTagValueOperationSettings() {
    return createTagValueOperationSettings;
  }

  /** Returns the builder for the settings used for calls to updateTagValue. */
  public UnaryCallSettings.Builder<UpdateTagValueRequest, Operation> updateTagValueSettings() {
    return updateTagValueSettings;
  }

  /** Returns the builder for the settings used for long-running calls to updateTagValue. */
  public OperationCallSettings.Builder<UpdateTagValueRequest, TagValue, UpdateTagValueMetadata>
      updateTagValueOperationSettings() {
    return updateTagValueOperationSettings;
  }

  /** Returns the builder for the settings used for calls to deleteTagValue. */
  public UnaryCallSettings.Builder<DeleteTagValueRequest, Operation> deleteTagValueSettings() {
    return deleteTagValueSettings;
  }

  /** Returns the builder for the settings used for long-running calls to deleteTagValue. */
  public OperationCallSettings.Builder<DeleteTagValueRequest, TagValue, DeleteTagValueMetadata>
      deleteTagValueOperationSettings() {
    return deleteTagValueOperationSettings;
  }

  /** Returns the builder for the settings used for calls to getIamPolicy. */
  public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() {
    return getIamPolicySettings;
  }

  /** Returns the builder for the settings used for calls to setIamPolicy. */
  public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() {
    return setIamPolicySettings;
  }

  /** Returns the builder for the settings used for calls to testIamPermissions. */
  public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }

  @Override
  public TagValuesStubSettings build() throws IOException {
    return new TagValuesStubSettings(this);
  }
}
}
|
apache/plc4x | 36,507 | plc4j/drivers/profinet-ng/src/main/generated/org/apache/plc4x/java/profinet/readwrite/DataItem.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.plc4x.java.profinet.readwrite;
import static org.apache.plc4x.java.spi.codegen.fields.FieldReaderFactory.*;
import static org.apache.plc4x.java.spi.codegen.fields.FieldWriterFactory.*;
import static org.apache.plc4x.java.spi.codegen.io.DataReaderFactory.*;
import static org.apache.plc4x.java.spi.codegen.io.DataWriterFactory.*;
import static org.apache.plc4x.java.spi.generation.StaticHelper.*;
import java.math.BigInteger;
import java.time.*;
import java.util.*;
import java.util.stream.Collectors;
import org.apache.plc4x.java.api.exceptions.*;
import org.apache.plc4x.java.api.value.*;
import org.apache.plc4x.java.spi.codegen.*;
import org.apache.plc4x.java.spi.codegen.fields.*;
import org.apache.plc4x.java.spi.codegen.io.*;
import org.apache.plc4x.java.spi.generation.*;
import org.apache.plc4x.java.spi.values.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Code generated by code-generation. DO NOT EDIT.
public class DataItem {
private static final Logger LOGGER = LoggerFactory.getLogger(DataItem.class);
public static PlcValue staticParse(
ReadBuffer readBuffer, ProfinetDataType dataType, Integer numberOfValues)
throws ParseException {
if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // BOOL
boolean value = readSimpleField("value", readBoolean(readBuffer));
return new PlcBOOL(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)) { // List
List<Boolean> _value = readCountArrayField("value", readBoolean(readBuffer), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (boolean _item : _value) {
value.add(new PlcBOOL(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
short value = readSimpleField("value", readUnsignedShort(readBuffer, 8));
return new PlcUSINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)) { // List
List<Short> _value =
readCountArrayField("value", readUnsignedShort(readBuffer, 8), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (short _item : _value) {
value.add(new PlcUSINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // BYTE
short value = readSimpleField("value", readUnsignedShort(readBuffer, 8));
return new PlcBYTE(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)) { // List
List<Boolean> _value =
readCountArrayField("value", readBoolean(readBuffer), (numberOfValues) * (8));
List<PlcValue> value = new ArrayList<>(_value.size());
for (boolean _item : _value) {
value.add(new PlcBOOL(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WORD)) { // WORD
int value = readSimpleField("value", readUnsignedInt(readBuffer, 16));
return new PlcWORD(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.DWORD)) { // DWORD
long value = readSimpleField("value", readUnsignedLong(readBuffer, 32));
return new PlcDWORD(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LWORD)) { // LWORD
BigInteger value = readSimpleField("value", readUnsignedBigInteger(readBuffer, 64));
return new PlcLWORD(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // SINT
byte value = readSimpleField("value", readSignedByte(readBuffer, 8));
return new PlcSINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)) { // List
List<Byte> _value =
readCountArrayField("value", readSignedByte(readBuffer, 8), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (byte _item : _value) {
value.add(new PlcSINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // INT
short value = readSimpleField("value", readSignedShort(readBuffer, 16));
return new PlcINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)) { // List
List<Short> _value =
readCountArrayField("value", readSignedShort(readBuffer, 16), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (short _item : _value) {
value.add(new PlcINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // DINT
int value = readSimpleField("value", readSignedInt(readBuffer, 32));
return new PlcDINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)) { // List
List<Integer> _value =
readCountArrayField("value", readSignedInt(readBuffer, 32), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (int _item : _value) {
value.add(new PlcDINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // LINT
long value = readSimpleField("value", readSignedLong(readBuffer, 64));
return new PlcLINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)) { // List
List<Long> _value =
readCountArrayField("value", readSignedLong(readBuffer, 64), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (long _item : _value) {
value.add(new PlcLINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
short value = readSimpleField("value", readUnsignedShort(readBuffer, 8));
return new PlcUSINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)) { // List
List<Short> _value =
readCountArrayField("value", readUnsignedShort(readBuffer, 8), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (short _item : _value) {
value.add(new PlcUSINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // UINT
int value = readSimpleField("value", readUnsignedInt(readBuffer, 16));
return new PlcUINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)) { // List
List<Integer> _value =
readCountArrayField("value", readUnsignedInt(readBuffer, 16), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (int _item : _value) {
value.add(new PlcUINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // UDINT
long value = readSimpleField("value", readUnsignedLong(readBuffer, 32));
return new PlcUDINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)) { // List
List<Long> _value =
readCountArrayField("value", readUnsignedLong(readBuffer, 32), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (long _item : _value) {
value.add(new PlcUDINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // ULINT
BigInteger value = readSimpleField("value", readUnsignedBigInteger(readBuffer, 64));
return new PlcULINT(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)) { // List
List<BigInteger> _value =
readCountArrayField("value", readUnsignedBigInteger(readBuffer, 64), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (BigInteger _item : _value) {
value.add(new PlcULINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // REAL
float value = readSimpleField("value", readFloat(readBuffer, 32));
return new PlcREAL(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)) { // List
List<Float> _value = readCountArrayField("value", readFloat(readBuffer, 32), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (float _item : _value) {
value.add(new PlcREAL(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // LREAL
double value = readSimpleField("value", readDouble(readBuffer, 64));
return new PlcLREAL(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)) { // List
List<Double> _value =
readCountArrayField("value", readDouble(readBuffer, 64), numberOfValues);
List<PlcValue> value = new ArrayList<>(_value.size());
for (double _item : _value) {
value.add(new PlcLREAL(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
String value =
readSimpleField("value", readString(readBuffer, 8), WithOption.WithEncoding("UTF-8"));
return new PlcCHAR(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)) { // List
List<String> _value =
readCountArrayField(
"value", readString(readBuffer, 8), numberOfValues, WithOption.WithEncoding("UTF-8"));
List<PlcValue> value = new ArrayList<>(_value.size());
for (String _item : _value) {
value.add(new PlcSTRING(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // WCHAR
String value =
readSimpleField("value", readString(readBuffer, 16), WithOption.WithEncoding("UTF-16"));
return new PlcWCHAR(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)) { // List
List<String> _value =
readCountArrayField(
"value",
readString(readBuffer, 16),
numberOfValues,
WithOption.WithEncoding("UTF-16"));
List<PlcValue> value = new ArrayList<>(_value.size());
for (String _item : _value) {
value.add(new PlcSTRING(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
String value =
readSimpleField("value", readString(readBuffer, 8), WithOption.WithEncoding("UTF-8"));
return new PlcCHAR(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)) { // List
List<String> _value =
readCountArrayField(
"value", readString(readBuffer, 8), numberOfValues, WithOption.WithEncoding("UTF-8"));
List<PlcValue> value = new ArrayList<>(_value.size());
for (String _item : _value) {
value.add(new PlcSTRING(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
String value =
readSimpleField("value", readString(readBuffer, 16), WithOption.WithEncoding("UTF-16"));
return new PlcCHAR(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)) { // List
List<String> _value =
readCountArrayField(
"value",
readString(readBuffer, 16),
numberOfValues,
WithOption.WithEncoding("UTF-16"));
List<PlcValue> value = new ArrayList<>(_value.size());
for (String _item : _value) {
value.add(new PlcSTRING(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
String value =
readSimpleField("value", readString(readBuffer, 8), WithOption.WithEncoding("UTF-8"));
return new PlcCHAR(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)) { // List
List<String> _value =
readCountArrayField(
"value", readString(readBuffer, 8), numberOfValues, WithOption.WithEncoding("UTF-8"));
List<PlcValue> value = new ArrayList<>(_value.size());
for (String _item : _value) {
value.add(new PlcSTRING(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)
&& EvaluationHelper.equals(numberOfValues, (int) 1)) { // List
List<Short> _value =
readCountArrayField("value", readUnsignedShort(readBuffer, 8), (4) * (8));
List<PlcValue> value = new ArrayList<>(_value.size());
for (short _item : _value) {
value.add(new PlcUSINT(_item));
}
return new PlcList(value);
} else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)) { // List
List<Short> _value =
readCountArrayField("value", readUnsignedShort(readBuffer, 8), (numberOfValues) * (32));
List<PlcValue> value = new ArrayList<>(_value.size());
for (short _item : _value) {
value.add(new PlcUSINT(_item));
}
return new PlcList(value);
}
return null;
}
public static int getLengthInBytes(
PlcValue _value, ProfinetDataType dataType, Integer numberOfValues) {
return (int) Math.ceil((float) getLengthInBits(_value, dataType, numberOfValues) / 8.0);
}
  /**
   * Computes the serialized length in bits of a Profinet data item of the given {@code dataType}.
   *
   * <p>Generated if/else chain: for each data type there is one branch for the scalar case
   * ({@code numberOfValues == 1}) and one for the list case. In the list branches the element
   * count is taken from {@code _value.getList().size()} — not from {@code numberOfValues} — and
   * a {@code null} value contributes 0 bits. Unhandled data types fall through and the method
   * returns 0.
   *
   * @param _value the value to be measured (may be {@code null} for list branches)
   * @param dataType the Profinet data type selecting the encoding
   * @param numberOfValues 1 selects the scalar branch, anything else the list branch
   * @return the length in bits, or 0 if the data type is not handled
   */
  public static int getLengthInBits(
      PlcValue _value, ProfinetDataType dataType, Integer numberOfValues) {
    int lengthInBits = 0;
    if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BOOL
      // Simple field (value)
      lengthInBits += 1;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 1 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
      // Simple field (value)
      lengthInBits += 8;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BYTE
      // Simple field (value)
      lengthInBits += 8;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)) { // List
      // Array field
      // NOTE(review): a BYTE list is counted at 1 bit per element, not 8 — consistent with
      // staticSerialize, which writes BYTE-list elements as booleans. Generated behavior;
      // confirm against the PLC4X mspec if this looks wrong.
      if (_value != null) {
        lengthInBits += 1 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WORD)) { // WORD
      // Simple field (value)
      lengthInBits += 16;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DWORD)) { // DWORD
      // Simple field (value)
      lengthInBits += 32;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LWORD)) { // LWORD
      // Simple field (value)
      lengthInBits += 64;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // SINT
      // Simple field (value)
      lengthInBits += 8;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // INT
      // Simple field (value)
      lengthInBits += 16;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 16 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // DINT
      // Simple field (value)
      lengthInBits += 32;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 32 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LINT
      // Simple field (value)
      lengthInBits += 64;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 64 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
      // Simple field (value)
      lengthInBits += 8;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UINT
      // Simple field (value)
      lengthInBits += 16;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 16 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UDINT
      // Simple field (value)
      lengthInBits += 32;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 32 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // ULINT
      // Simple field (value)
      lengthInBits += 64;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 64 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // REAL
      // Simple field (value)
      lengthInBits += 32;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 32 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LREAL
      // Simple field (value)
      lengthInBits += 64;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 64 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple field (value)
      lengthInBits += 8;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // WCHAR
      // Simple field (value)
      lengthInBits += 16;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 16 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple field (value)
      lengthInBits += 8;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple field (value)
      lengthInBits += 16;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 16 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple field (value)
      lengthInBits += 8;
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // List
      // Array field
      // NOTE(review): both F_MESSAGETRAILER4BYTE branches count 8 bits per list element;
      // the scalar branch is still list-shaped (see the matching read branch above).
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)) { // List
      // Array field
      if (_value != null) {
        lengthInBits += 8 * _value.getList().size();
      }
    }
    return lengthInBits;
  }
  /**
   * Serializes the given value with the default byte order ({@link ByteOrder#BIG_ENDIAN}).
   *
   * <p>Convenience overload delegating to
   * {@link #staticSerialize(WriteBuffer, PlcValue, ProfinetDataType, Integer, ByteOrder)}.
   *
   * @throws SerializationException if a field writer fails
   */
  public static void staticSerialize(
      WriteBuffer writeBuffer, PlcValue _value, ProfinetDataType dataType, Integer numberOfValues)
      throws SerializationException {
    staticSerialize(writeBuffer, _value, dataType, numberOfValues, ByteOrder.BIG_ENDIAN);
  }
  /**
   * Serializes {@code _value} into {@code writeBuffer} according to {@code dataType}.
   *
   * <p>Generated if/else chain mirroring {@code getLengthInBits}: for each data type there is
   * one branch for the scalar case ({@code numberOfValues == 1}) and one for the list case.
   * Data types not covered by any branch are silently skipped — nothing is written.
   *
   * <p>NOTE(review): the {@code byteOrder} parameter is not referenced anywhere in this method
   * body — presumably the byte order is carried by the {@code writeBuffer} itself; confirm
   * against the generated read side before relying on it.
   *
   * @param writeBuffer target buffer receiving the serialized bits
   * @param _value the value to serialize
   * @param dataType the Profinet data type selecting the encoding
   * @param numberOfValues 1 selects the scalar branch, anything else the list branch
   * @param byteOrder requested byte order (currently unused in this body)
   * @throws SerializationException if a field writer fails
   */
  public static void staticSerialize(
      WriteBuffer writeBuffer,
      PlcValue _value,
      ProfinetDataType dataType,
      Integer numberOfValues,
      ByteOrder byteOrder)
      throws SerializationException {
    if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BOOL
      // Simple Field (value)
      writeSimpleField("value", (boolean) _value.getBoolean(), writeBoolean(writeBuffer));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BOOL)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getBoolean).collect(Collectors.toList()),
          writeBoolean(writeBuffer));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
      // Simple Field (value)
      writeSimpleField("value", (short) _value.getShort(), writeUnsignedShort(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.OCTETSTRING)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
          writeUnsignedShort(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // BYTE
      // Simple Field (value)
      writeSimpleField("value", (short) _value.getShort(), writeUnsignedShort(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.BYTE)) { // List
      // Array Field (value)
      // NOTE(review): a BYTE list is written element-wise as booleans (1 bit each), matching the
      // 1-bit-per-element accounting in getLengthInBits — generated behavior; confirm.
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getBoolean).collect(Collectors.toList()),
          writeBoolean(writeBuffer));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WORD)) { // WORD
      // Simple Field (value)
      writeSimpleField("value", (int) _value.getInteger(), writeUnsignedInt(writeBuffer, 16));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DWORD)) { // DWORD
      // Simple Field (value)
      writeSimpleField("value", (long) _value.getLong(), writeUnsignedLong(writeBuffer, 32));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LWORD)) { // LWORD
      // Simple Field (value)
      writeSimpleField(
          "value", (BigInteger) _value.getBigInteger(), writeUnsignedBigInteger(writeBuffer, 64));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // SINT
      // Simple Field (value)
      writeSimpleField("value", (byte) _value.getByte(), writeSignedByte(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.SINT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getByte).collect(Collectors.toList()),
          writeSignedByte(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // INT
      // Simple Field (value)
      writeSimpleField("value", (short) _value.getShort(), writeSignedShort(writeBuffer, 16));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.INT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
          writeSignedShort(writeBuffer, 16));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // DINT
      // Simple Field (value)
      writeSimpleField("value", (int) _value.getInteger(), writeSignedInt(writeBuffer, 32));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.DINT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getInteger).collect(Collectors.toList()),
          writeSignedInt(writeBuffer, 32));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LINT
      // Simple Field (value)
      writeSimpleField("value", (long) _value.getLong(), writeSignedLong(writeBuffer, 64));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LINT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getLong).collect(Collectors.toList()),
          writeSignedLong(writeBuffer, 64));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // USINT
      // Simple Field (value)
      writeSimpleField("value", (short) _value.getShort(), writeUnsignedShort(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.USINT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
          writeUnsignedShort(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UINT
      // Simple Field (value)
      writeSimpleField("value", (int) _value.getInteger(), writeUnsignedInt(writeBuffer, 16));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UINT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getInteger).collect(Collectors.toList()),
          writeUnsignedInt(writeBuffer, 16));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // UDINT
      // Simple Field (value)
      writeSimpleField("value", (long) _value.getLong(), writeUnsignedLong(writeBuffer, 32));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UDINT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getLong).collect(Collectors.toList()),
          writeUnsignedLong(writeBuffer, 32));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // ULINT
      // Simple Field (value)
      writeSimpleField(
          "value", (BigInteger) _value.getBigInteger(), writeUnsignedBigInteger(writeBuffer, 64));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.ULINT)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getBigInteger).collect(Collectors.toList()),
          writeUnsignedBigInteger(writeBuffer, 64));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // REAL
      // Simple Field (value)
      writeSimpleField("value", (float) _value.getFloat(), writeFloat(writeBuffer, 32));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.REAL)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getFloat).collect(Collectors.toList()),
          writeFloat(writeBuffer, 32));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // LREAL
      // Simple Field (value)
      writeSimpleField("value", (double) _value.getDouble(), writeDouble(writeBuffer, 64));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.LREAL)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getDouble).collect(Collectors.toList()),
          writeDouble(writeBuffer, 64));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple Field (value)
      writeSimpleField(
          "value",
          (String) _value.getString(),
          writeString(writeBuffer, 8),
          WithOption.WithEncoding("UTF-8"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.CHAR)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
          writeString(writeBuffer, 8),
          WithOption.WithEncoding("UTF-8"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // WCHAR
      // Simple Field (value)
      writeSimpleField(
          "value",
          (String) _value.getString(),
          writeString(writeBuffer, 16),
          WithOption.WithEncoding("UTF-16"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WCHAR)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
          writeString(writeBuffer, 16),
          WithOption.WithEncoding("UTF-16"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple Field (value)
      writeSimpleField(
          "value",
          (String) _value.getString(),
          writeString(writeBuffer, 8),
          WithOption.WithEncoding("UTF-8"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.UNICODESTRING8)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
          writeString(writeBuffer, 8),
          WithOption.WithEncoding("UTF-8"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple Field (value)
      writeSimpleField(
          "value",
          (String) _value.getString(),
          writeString(writeBuffer, 16),
          WithOption.WithEncoding("UTF-16"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.WSTRING)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
          writeString(writeBuffer, 16),
          WithOption.WithEncoding("UTF-16"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // CHAR
      // Simple Field (value)
      writeSimpleField(
          "value",
          (String) _value.getString(),
          writeString(writeBuffer, 8),
          WithOption.WithEncoding("UTF-8"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.VISIBLESTRING)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getString).collect(Collectors.toList()),
          writeString(writeBuffer, 8),
          WithOption.WithEncoding("UTF-8"));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)
        && EvaluationHelper.equals(numberOfValues, (int) 1)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
          writeUnsignedShort(writeBuffer, 8));
    } else if (EvaluationHelper.equals(dataType, ProfinetDataType.F_MESSAGETRAILER4BYTE)) { // List
      // Array Field (value)
      writeSimpleTypeArrayField(
          "value",
          _value.getList().stream().map(PlcValue::getShort).collect(Collectors.toList()),
          writeUnsignedShort(writeBuffer, 8));
    }
  }
}
|
apache/paimon | 36,614 | paimon-core/src/test/java/org/apache/paimon/TestFileStore.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.paimon;
import org.apache.paimon.data.BinaryRow;
import org.apache.paimon.data.serializer.InternalRowSerializer;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.FileIOFinder;
import org.apache.paimon.fs.Path;
import org.apache.paimon.index.IndexFileMeta;
import org.apache.paimon.io.DataFileMeta;
import org.apache.paimon.io.DataFilePathFactory;
import org.apache.paimon.io.DataIncrement;
import org.apache.paimon.manifest.FileEntry;
import org.apache.paimon.manifest.FileKind;
import org.apache.paimon.manifest.FileSource;
import org.apache.paimon.manifest.ManifestCommittable;
import org.apache.paimon.manifest.ManifestEntry;
import org.apache.paimon.manifest.ManifestFile;
import org.apache.paimon.manifest.ManifestFileMeta;
import org.apache.paimon.manifest.ManifestList;
import org.apache.paimon.memory.HeapMemorySegmentPool;
import org.apache.paimon.memory.MemoryOwner;
import org.apache.paimon.mergetree.compact.MergeFunctionFactory;
import org.apache.paimon.operation.AbstractFileStoreWrite;
import org.apache.paimon.operation.FileStoreCommit;
import org.apache.paimon.operation.FileStoreCommitImpl;
import org.apache.paimon.operation.SplitRead;
import org.apache.paimon.options.ExpireConfig;
import org.apache.paimon.options.MemorySize;
import org.apache.paimon.options.Options;
import org.apache.paimon.reader.RecordReaderIterator;
import org.apache.paimon.schema.KeyValueFieldsExtractor;
import org.apache.paimon.schema.SchemaManager;
import org.apache.paimon.schema.TableSchema;
import org.apache.paimon.table.CatalogEnvironment;
import org.apache.paimon.table.ExpireChangelogImpl;
import org.apache.paimon.table.ExpireSnapshots;
import org.apache.paimon.table.ExpireSnapshotsImpl;
import org.apache.paimon.table.SpecialFields;
import org.apache.paimon.table.sink.CommitMessageImpl;
import org.apache.paimon.table.source.DataSplit;
import org.apache.paimon.table.source.ScanMode;
import org.apache.paimon.types.RowType;
import org.apache.paimon.utils.ChangelogManager;
import org.apache.paimon.utils.CommitIncrement;
import org.apache.paimon.utils.DataFilePathFactories;
import org.apache.paimon.utils.FileStorePathFactory;
import org.apache.paimon.utils.HintFileUtils;
import org.apache.paimon.utils.Pair;
import org.apache.paimon.utils.RecordWriter;
import org.apache.paimon.utils.SnapshotManager;
import org.apache.paimon.utils.TagManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import static org.assertj.core.api.Assertions.assertThat;
/** {@link FileStore} for tests. */
public class TestFileStore extends KeyValueFileStore {
    private static final Logger LOG = LoggerFactory.getLogger(TestFileStore.class);
    // Deliberately small buffer/page sizes so tests hit flush/spill paths cheaply; used to size
    // the HeapMemorySegmentPool handed to each writer in commitDataImpl.
    public static final MemorySize WRITE_BUFFER_SIZE = MemorySize.parse("16 kb");
    public static final MemorySize PAGE_SIZE = MemorySize.parse("4 kb");
    // Root directory of the test table, kept as the original string form.
    private final String root;
    // FileIO resolved from the root path in the constructor.
    private final FileIO fileIO;
    // Row serializers built from the store's key and value row types.
    private final InternalRowSerializer keySerializer;
    private final InternalRowSerializer valueSerializer;
    // Random per-store commit user (UUID), used by newWrite()/newCommit().
    private final String commitUser;
    // Auto-incremented commit identifier, bumped in commitDataImpl when none is given explicitly.
    private long commitIdentifier;
    /**
     * Creates a test file store rooted at {@code root}.
     *
     * <p>If {@code tableSchema} is {@code null}, a fresh schema (id 0) is synthesized from the
     * value type, partition field names and the key field names with the synthetic key prefix
     * stripped (see {@link #cleanPrimaryKeys}).
     */
    private TestFileStore(
            String root,
            CoreOptions options,
            RowType partitionType,
            RowType keyType,
            RowType valueType,
            KeyValueFieldsExtractor keyValueFieldsExtractor,
            MergeFunctionFactory<KeyValue> mfFactory,
            TableSchema tableSchema) {
        super(
                FileIOFinder.find(new Path(root)),
                schemaManager(root, options),
                tableSchema != null
                        ? tableSchema
                        : new TableSchema(
                                0L,
                                valueType.getFields(),
                                valueType.getFieldCount(),
                                partitionType.getFieldNames(),
                                cleanPrimaryKeys(keyType.getFieldNames()),
                                Collections.emptyMap(),
                                null),
                false,
                options,
                partitionType,
                // NOTE(review): keyType is passed twice — presumably once as the bucket-key type
                // and once as the key type; confirm against KeyValueFileStore's constructor.
                keyType,
                keyType,
                valueType,
                keyValueFieldsExtractor,
                mfFactory,
                (new Path(root)).getName(),
                CatalogEnvironment.empty());
        this.root = root;
        this.fileIO = FileIOFinder.find(new Path(root));
        this.keySerializer = new InternalRowSerializer(keyType);
        this.valueSerializer = new InternalRowSerializer(valueType);
        // Each TestFileStore instance commits under its own random user id.
        this.commitUser = UUID.randomUUID().toString();
        this.commitIdentifier = 0L;
    }
private static List<String> cleanPrimaryKeys(List<String> primaryKeys) {
return primaryKeys.stream()
.map(k -> k.substring(SpecialFields.KEY_FIELD_PREFIX.length()))
.collect(Collectors.toList());
}
private static SchemaManager schemaManager(String root, CoreOptions options) {
return new SchemaManager(FileIOFinder.find(new Path(root)), options.path());
}
    /** Returns the {@link FileIO} this store resolved from its root path. */
    public FileIO fileIO() {
        return fileIO;
    }
    /** Creates a writer bound to this store's fixed {@code commitUser}. */
    public AbstractFileStoreWrite<KeyValue> newWrite() {
        return super.newWrite(commitUser);
    }
    /**
     * Creates a commit bound to this store's fixed {@code commitUser}.
     *
     * <p>The second argument is {@code null} — presumably the optional branch/metrics argument of
     * the parent factory; TODO confirm against {@code KeyValueFileStore#newCommit}.
     */
    public FileStoreCommitImpl newCommit() {
        return super.newCommit(commitUser, null);
    }
public ExpireSnapshots newExpire(int numRetainedMin, int numRetainedMax, long millisRetained) {
return new ExpireSnapshotsImpl(
snapshotManager(),
changelogManager(),
newSnapshotDeletion(),
new TagManager(fileIO, options.path()))
.config(
ExpireConfig.builder()
.snapshotRetainMax(numRetainedMax)
.snapshotRetainMin(numRetainedMin)
.snapshotTimeRetain(Duration.ofMillis(millisRetained))
.build());
}
public ExpireSnapshots newExpire(ExpireConfig expireConfig) {
return new ExpireSnapshotsImpl(
snapshotManager(),
changelogManager(),
newSnapshotDeletion(),
new TagManager(fileIO, options.path()))
.config(expireConfig);
}
public ExpireSnapshots newChangelogExpire(ExpireConfig config) {
ExpireChangelogImpl impl =
new ExpireChangelogImpl(
snapshotManager(),
changelogManager(),
new TagManager(fileIO, options.path()),
newChangelogDeletion());
impl.config(config);
return impl;
}
    /**
     * Writes the given key-values and commits them, recording no per-bucket log offsets.
     *
     * @return the snapshots produced by the commit (as returned by {@code commitDataImpl})
     */
    public List<Snapshot> commitData(
            List<KeyValue> kvs,
            Function<KeyValue, BinaryRow> partitionCalculator,
            Function<KeyValue, Integer> bucketCalculator)
            throws Exception {
        return commitData(kvs, partitionCalculator, bucketCalculator, new HashMap<>());
    }
    /**
     * Writes the given key-values (all into bucket 0) and commits them with the given watermark.
     *
     * @return the snapshots produced by the commit (as returned by {@code commitDataImpl})
     */
    public List<Snapshot> commitDataWatermark(
            List<KeyValue> kvs, Function<KeyValue, BinaryRow> partitionCalculator, Long watermark)
            throws Exception {
        return commitDataImpl(
                kvs,
                partitionCalculator,
                kv -> 0, // fixed bucket 0
                false,
                null,
                watermark,
                Collections.emptyList(),
                (commit, committable) -> commit.commit(committable, false));
    }
public List<Snapshot> commitData(
List<KeyValue> kvs,
Function<KeyValue, BinaryRow> partitionCalculator,
Function<KeyValue, Integer> bucketCalculator,
Map<Integer, Long> logOffsets)
throws Exception {
return commitDataImpl(
kvs,
partitionCalculator,
bucketCalculator,
false,
null,
null,
Collections.emptyList(),
(commit, committable) -> {
logOffsets.forEach(
(bucket, offset) -> committable.addLogOffset(bucket, offset, false));
commit.commit(committable, false);
});
}
    /**
     * Writes the given key-values and commits them as an overwrite of the given partition
     * (previous files in that partition are ignored).
     *
     * @param partition the (static) partition spec to overwrite
     * @return the snapshots produced by the commit (as returned by {@code commitDataImpl})
     */
    public List<Snapshot> overwriteData(
            List<KeyValue> kvs,
            Function<KeyValue, BinaryRow> partitionCalculator,
            Function<KeyValue, Integer> bucketCalculator,
            Map<String, String> partition)
            throws Exception {
        return commitDataImpl(
                kvs,
                partitionCalculator,
                bucketCalculator,
                true, // ignorePreviousFiles: overwrite semantics
                null,
                null,
                Collections.emptyList(),
                (commit, committable) ->
                        commit.overwritePartition(partition, committable, Collections.emptyMap()));
    }
public Snapshot dropPartitions(List<Map<String, String>> partitions) {
SnapshotManager snapshotManager = snapshotManager();
Long snapshotIdBeforeCommit = snapshotManager.latestSnapshotId();
if (snapshotIdBeforeCommit == null) {
snapshotIdBeforeCommit = Snapshot.FIRST_SNAPSHOT_ID - 1;
}
try (FileStoreCommit commit = newCommit(commitUser, null)) {
commit.dropPartitions(partitions, Long.MAX_VALUE);
}
Long snapshotIdAfterCommit = snapshotManager.latestSnapshotId();
assertThat(snapshotIdAfterCommit).isNotNull();
assertThat(snapshotIdBeforeCommit + 1).isEqualTo(snapshotIdAfterCommit);
return snapshotManager.snapshot(snapshotIdAfterCommit);
}
    /**
     * Writes a single key-value into the given bucket together with the given index files and
     * commits them.
     *
     * @param indexFiles index files attached to the new-files increment of the commit
     * @return the snapshots produced by the commit (as returned by {@code commitDataImpl})
     */
    public List<Snapshot> commitDataIndex(
            KeyValue kv,
            Function<KeyValue, BinaryRow> partitionCalculator,
            int bucket,
            IndexFileMeta... indexFiles)
            throws Exception {
        return commitDataImpl(
                Collections.singletonList(kv),
                partitionCalculator,
                ignore -> bucket, // fixed bucket for the single record
                false,
                null,
                null,
                Arrays.asList(indexFiles),
                (commit, committable) -> commit.commit(committable, false));
    }
/**
 * Writes the given key-values through per-(partition, bucket) writers, gathers all writer
 * increments into a {@link ManifestCommittable} and commits it via {@code commitFunction}.
 *
 * @param kvs records to write
 * @param partitionCalculator maps a record to its partition
 * @param bucketCalculator maps a record to its bucket
 * @param ignorePreviousFiles when true, writers start from an empty state (overwrite semantics)
 * @param identifier commit identifier; when null an auto-incremented identifier is used
 * @param watermark watermark attached to the committable, may be null
 * @param indexFiles index files added to every produced data increment
 * @param commitFunction performs the actual commit of the prepared committable
 * @return all snapshots created between the latest snapshot before and after the commit
 */
public List<Snapshot> commitDataImpl(
        List<KeyValue> kvs,
        Function<KeyValue, BinaryRow> partitionCalculator,
        Function<KeyValue, Integer> bucketCalculator,
        boolean ignorePreviousFiles,
        Long identifier,
        Long watermark,
        List<IndexFileMeta> indexFiles,
        BiConsumer<FileStoreCommit, ManifestCommittable> commitFunction)
        throws Exception {
    AbstractFileStoreWrite<KeyValue> write = newWrite();
    Map<BinaryRow, Map<Integer, RecordWriter<KeyValue>>> writers = new HashMap<>();
    for (KeyValue kv : kvs) {
        BinaryRow partition = partitionCalculator.apply(kv);
        int bucket = bucketCalculator.apply(kv);
        // Lazily create exactly one writer per (partition, bucket) and route the record to it.
        writers.computeIfAbsent(partition, p -> new HashMap<>())
                .compute(
                        bucket,
                        (b, w) -> {
                            if (w == null) {
                                write.withIgnorePreviousFiles(ignorePreviousFiles);
                                RecordWriter<KeyValue> writer =
                                        write.createWriterContainer(partition, bucket).writer;
                                // Writers must be given a memory pool before buffering records.
                                ((MemoryOwner) writer)
                                        .setMemoryPool(
                                                new HeapMemorySegmentPool(
                                                        WRITE_BUFFER_SIZE.getBytes(),
                                                        (int) PAGE_SIZE.getBytes()));
                                return writer;
                            } else {
                                return w;
                            }
                        })
                .write(kv);
    }
    ManifestCommittable committable =
            new ManifestCommittable(
                    identifier == null ? commitIdentifier++ : identifier, watermark);
    // Collect every writer's prepared increments into the single committable.
    for (Map.Entry<BinaryRow, Map<Integer, RecordWriter<KeyValue>>> entryWithPartition :
            writers.entrySet()) {
        for (Map.Entry<Integer, RecordWriter<KeyValue>> entryWithBucket :
                entryWithPartition.getValue().entrySet()) {
            CommitIncrement increment =
                    entryWithBucket.getValue().prepareCommit(ignorePreviousFiles);
            DataIncrement dataIncrement = increment.newFilesIncrement();
            dataIncrement.newIndexFiles().addAll(indexFiles);
            committable.addFileCommittable(
                    new CommitMessageImpl(
                            entryWithPartition.getKey(),
                            entryWithBucket.getKey(),
                            options().bucket(),
                            dataIncrement,
                            increment.compactIncrement()));
        }
    }
    // Record the latest snapshot id before and after committing so we can return exactly
    // the snapshots this commit created (a commit may create more than one snapshot).
    SnapshotManager snapshotManager = snapshotManager();
    Long snapshotIdBeforeCommit = snapshotManager.latestSnapshotId();
    if (snapshotIdBeforeCommit == null) {
        snapshotIdBeforeCommit = Snapshot.FIRST_SNAPSHOT_ID - 1;
    }
    try (FileStoreCommit commit = newCommit(commitUser, null)) {
        commitFunction.accept(commit, committable);
    }
    Long snapshotIdAfterCommit = snapshotManager.latestSnapshotId();
    if (snapshotIdAfterCommit == null) {
        snapshotIdAfterCommit = Snapshot.FIRST_SNAPSHOT_ID - 1;
    }
    writers.values().stream()
            .flatMap(m -> m.values().stream())
            .forEach(
                    w -> {
                        try {
                            // wait for compaction to end, otherwise orphan files may occur
                            // see CompactManager#cancelCompaction for more info
                            w.sync();
                            w.close();
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    });
    List<Snapshot> snapshots = new ArrayList<>();
    for (long id = snapshotIdBeforeCommit + 1; id <= snapshotIdAfterCommit; id++) {
        snapshots.add(snapshotManager.snapshot(id));
    }
    return snapshots;
}
/** Reads all key-values contained in the data files of the given snapshot. */
public List<KeyValue> readKvsFromSnapshot(long snapshotId) throws Exception {
    return readKvsFromManifestEntries(
            newScan().withSnapshot(snapshotId).plan().files(), false);
}
/**
 * Reads the changelog of every snapshot from the first one up to {@code endInclusive},
 * concatenated in snapshot order. With no changelog producer configured, the per-snapshot
 * delta is used as the changelog.
 */
public List<KeyValue> readAllChangelogUntilSnapshot(long endInclusive) throws Exception {
    List<KeyValue> result = new ArrayList<>();
    // The scan mode does not depend on the snapshot, so compute it once.
    ScanMode scanMode =
            options.changelogProducer() == CoreOptions.ChangelogProducer.NONE
                    ? ScanMode.DELTA
                    : ScanMode.CHANGELOG;
    for (long snapshotId = Snapshot.FIRST_SNAPSHOT_ID;
            snapshotId <= endInclusive;
            snapshotId++) {
        List<ManifestEntry> entries =
                newScan().withKind(scanMode).withSnapshot(snapshotId).plan().files();
        result.addAll(readKvsFromManifestEntries(entries, true));
    }
    return result;
}
/**
 * Reads all key-values from the data files referenced by the given manifest entries.
 *
 * <p>Entries are grouped by partition and bucket so each (partition, bucket) pair is read
 * through a single {@link DataSplit}. Each key-value is deep-copied so it stays valid after
 * the underlying reader is closed.
 *
 * @param entries manifest entries pointing at the data files to read
 * @param isStreaming whether the split is created in streaming mode
 */
public List<KeyValue> readKvsFromManifestEntries(
        List<ManifestEntry> entries, boolean isStreaming) throws Exception {
    if (LOG.isDebugEnabled()) {
        for (ManifestEntry entry : entries) {
            LOG.debug("reading from " + entry.toString());
        }
    }
    Map<BinaryRow, Map<Integer, List<DataFileMeta>>> filesPerPartitionAndBucket =
            new HashMap<>();
    for (ManifestEntry entry : entries) {
        filesPerPartitionAndBucket
                .computeIfAbsent(entry.partition(), p -> new HashMap<>())
                .computeIfAbsent(entry.bucket(), b -> new ArrayList<>())
                .add(entry.file());
    }
    List<KeyValue> kvs = new ArrayList<>();
    SplitRead<KeyValue> read = newRead();
    for (Map.Entry<BinaryRow, Map<Integer, List<DataFileMeta>>> entryWithPartition :
            filesPerPartitionAndBucket.entrySet()) {
        for (Map.Entry<Integer, List<DataFileMeta>> entryWithBucket :
                entryWithPartition.getValue().entrySet()) {
            RecordReaderIterator<KeyValue> iterator =
                    new RecordReaderIterator<>(
                            read.createReader(
                                    DataSplit.builder()
                                            .withPartition(entryWithPartition.getKey())
                                            .withBucket(entryWithBucket.getKey())
                                            .withDataFiles(entryWithBucket.getValue())
                                            .isStreaming(isStreaming)
                                            .rawConvertible(false)
                                            .withBucketPath("not used")
                                            .build()));
            // FIX: close the iterator in a finally block; the original leaked the
            // underlying reader if hasNext()/next()/copy() threw.
            try {
                while (iterator.hasNext()) {
                    kvs.add(iterator.next().copy(keySerializer, valueSerializer));
                }
            } finally {
                iterator.close();
            }
        }
    }
    return kvs;
}
/**
 * Compacts an ordered list of key-values into a key -&gt; value map by replaying them:
 * INSERT / UPDATE_AFTER put the entry, UPDATE_BEFORE / DELETE remove it.
 *
 * @param kvs the key-values to replay, in commit order
 * @return the resulting key to value mapping
 */
public Map<BinaryRow, BinaryRow> toKvMap(List<KeyValue> kvs) {
    if (LOG.isDebugEnabled()) {
        String rendered =
                kvs.stream()
                        .map(
                                kv ->
                                        kv.toString(
                                                TestKeyValueGenerator.KEY_TYPE,
                                                TestKeyValueGenerator.DEFAULT_ROW_TYPE))
                        .collect(Collectors.joining("\n"));
        LOG.debug("Compacting list of key values to kv map\n" + rendered);
    }
    Map<BinaryRow, BinaryRow> result = new HashMap<>();
    for (KeyValue kv : kvs) {
        // Copy both rows: serializers reuse internal buffers across records.
        BinaryRow key = keySerializer.toBinaryRow(kv.key()).copy();
        BinaryRow value = valueSerializer.toBinaryRow(kv.value()).copy();
        switch (kv.valueKind()) {
            case INSERT:
            case UPDATE_AFTER:
                result.put(key, value);
                break;
            case UPDATE_BEFORE:
            case DELETE:
                result.remove(key);
                break;
            default:
                throw new UnsupportedOperationException(
                        "Unknown value kind " + kv.valueKind().name());
        }
    }
    return result;
}
/**
 * Asserts that the files on disk are exactly the files still in use, i.e. that expired
 * snapshots and changelogs have been fully cleaned up.
 *
 * <p>Best-effort "earliest"/"latest" hint files are removed before comparing. Under
 * concurrent tests the hints may lag behind the real ids, so for them we only assert:
 * hinted earliest &lt;= true earliest, hinted latest &lt;= true latest.
 *
 * @throws IOException if walking the table directory or deleting a hint file fails
 */
public void assertCleaned() throws IOException {
    Set<Path> filesInUse = getFilesInUse();
    Set<Path> actualFiles =
            Files.walk(Paths.get(root))
                    .filter(Files::isRegularFile)
                    .map(p -> new Path(p.toString()))
                    .collect(Collectors.toSet());
    SnapshotManager snapshotManager = snapshotManager();
    ChangelogManager changelogManager = changelogManager();
    Path snapshotDir = snapshotManager.snapshotDirectory();
    Path earliest = new Path(snapshotDir, HintFileUtils.EARLIEST);
    Path latest = new Path(snapshotDir, HintFileUtils.LATEST);
    if (actualFiles.remove(earliest)) {
        long earliestId = snapshotManager.earliestSnapshotId();
        fileIO.delete(earliest, false);
        assertThat(earliestId <= snapshotManager.earliestSnapshotId()).isTrue();
    }
    if (actualFiles.remove(latest)) {
        long latestId = snapshotManager.latestSnapshotId();
        fileIO.delete(latest, false);
        assertThat(latestId <= snapshotManager.latestSnapshotId()).isTrue();
    }
    Path changelogDir = changelogManager.changelogDirectory();
    Path earliestChangelog = new Path(changelogDir, HintFileUtils.EARLIEST);
    Path latestChangelog = new Path(changelogDir, HintFileUtils.LATEST);
    if (actualFiles.remove(earliestChangelog)) {
        long earliestId = changelogManager.earliestLongLivedChangelogId();
        // FIX: delete the changelog hint file; the original deleted the snapshot hint.
        fileIO.delete(earliestChangelog, false);
        assertThat(earliestId <= changelogManager.earliestLongLivedChangelogId()).isTrue();
    }
    if (actualFiles.remove(latestChangelog)) {
        long latestId = changelogManager.latestLongLivedChangelogId();
        // FIX: delete the changelog hint file; the original deleted the snapshot hint.
        fileIO.delete(latestChangelog, false);
        assertThat(latestId <= changelogManager.latestLongLivedChangelogId()).isTrue();
    }
    // Compare sorted, newline-joined strings so an assertion failure shows a readable diff.
    String expectedString =
            filesInUse.stream().map(Path::toString).sorted().collect(Collectors.joining(",\n"));
    String actualString =
            actualFiles.stream()
                    .map(Path::toString)
                    .sorted()
                    .collect(Collectors.joining(",\n"));
    assertThat(actualString).isEqualTo(expectedString);
}
/**
 * Collects every file currently in use: all schema files plus all files referenced by
 * non-expired snapshots and long-lived changelogs.
 */
private Set<Path> getFilesInUse() {
    Set<Path> result = new HashSet<>();
    SchemaManager schemaManager = new SchemaManager(fileIO, options.path());
    for (Long schemaId : schemaManager.listAllIds()) {
        result.add(schemaManager.toSchemaPath(schemaId));
    }
    SnapshotManager snapshotManager = snapshotManager();
    ChangelogManager changelogManager = changelogManager();
    Long latestSnapshotId = snapshotManager.latestSnapshotId();
    if (latestSnapshotId == null) {
        return result;
    }
    // Walk backwards from the latest snapshot to find the oldest id that still exists
    // either as a snapshot or as a long-lived changelog.
    long firstInUseSnapshotId = Snapshot.FIRST_SNAPSHOT_ID;
    for (long id = latestSnapshotId - 1; id >= Snapshot.FIRST_SNAPSHOT_ID; id--) {
        boolean stillExists =
                snapshotManager.snapshotExists(id)
                        || changelogManager.longLivedChangelogExists(id);
        if (!stillExists) {
            firstInUseSnapshotId = id + 1;
            break;
        }
    }
    for (long id = firstInUseSnapshotId; id <= latestSnapshotId; id++) {
        result.addAll(getFilesInUse(id));
    }
    return result;
}
/** Collects the files referenced by the given snapshot (or long-lived changelog) id. */
public Set<Path> getFilesInUse(long snapshotId) {
    ManifestList manifestList = manifestListFactory().create();
    ManifestFile manifestFile = manifestFileFactory().create();
    return getFilesInUse(
            snapshotId,
            snapshotManager(),
            changelogManager(),
            fileIO,
            pathFactory(),
            manifestList,
            manifestFile);
}
/**
 * Collects the files referenced by the given id, which must exist either as a snapshot or
 * as a long-lived changelog; otherwise a {@link RuntimeException} is thrown.
 */
public static Set<Path> getFilesInUse(
        long snapshotId,
        SnapshotManager snapshotManager,
        ChangelogManager changelogManager,
        FileIO fileIO,
        FileStorePathFactory pathFactory,
        ManifestList manifestList,
        ManifestFile manifestFile) {
    // Snapshots take precedence; fall back to long-lived changelogs.
    if (snapshotManager.snapshotExists(snapshotId)) {
        return new HashSet<>(
                getSnapshotFileInUse(
                        snapshotId,
                        snapshotManager,
                        changelogManager,
                        fileIO,
                        pathFactory,
                        manifestList,
                        manifestFile));
    }
    if (changelogManager.longLivedChangelogExists(snapshotId)) {
        return new HashSet<>(
                getChangelogFileInUse(
                        snapshotId,
                        snapshotManager,
                        changelogManager,
                        fileIO,
                        pathFactory,
                        manifestList,
                        manifestFile));
    }
    throw new RuntimeException(String.format("The snapshot %s does not exist.", snapshotId));
}
/**
 * Collects every file referenced by the given snapshot: the snapshot file itself, its
 * manifest lists and manifests, and the data files recorded in the merged manifests.
 *
 * <p>When changelogs are decoupled from snapshots but no changelog is produced, delayed
 * 'DELETE' entries of APPEND files are also counted as in-use until the snapshot expires.
 */
private static Set<Path> getSnapshotFileInUse(
        long snapshotId,
        SnapshotManager snapshotManager,
        ChangelogManager changelogManager,
        FileIO fileIO,
        FileStorePathFactory pathFactory,
        ManifestList manifestList,
        ManifestFile manifestFile) {
    Set<Path> result = new HashSet<>();
    SchemaManager schemaManager = new SchemaManager(fileIO, snapshotManager.tablePath());
    CoreOptions options = new CoreOptions(schemaManager.latest().get().options());
    boolean produceChangelog =
            options.changelogProducer() != CoreOptions.ChangelogProducer.NONE;
    // The option from the table may not align with the expiration config
    boolean changelogDecoupled = changelogManager.earliestLongLivedChangelogId() != null;
    Path snapshotPath = snapshotManager.snapshotPath(snapshotId);
    Snapshot snapshot = SnapshotManager.fromPath(fileIO, snapshotPath);
    // snapshot file
    result.add(snapshotPath);
    // manifest lists
    result.add(pathFactory.toManifestListPath(snapshot.baseManifestList()));
    result.add(pathFactory.toManifestListPath(snapshot.deltaManifestList()));
    if (snapshot.changelogManifestList() != null) {
        result.add(pathFactory.toManifestListPath(snapshot.changelogManifestList()));
    }
    // manifests
    List<ManifestFileMeta> manifests = manifestList.readAllManifests(snapshot);
    manifests.forEach(m -> result.add(pathFactory.toManifestFilePath(m.fileName())));
    // data file: merge entries so DELETE entries cancel the corresponding ADD entries
    List<ManifestEntry> entries =
            manifests.stream()
                    .flatMap(m -> manifestFile.read(m.fileName()).stream())
                    .collect(Collectors.toList());
    entries = new ArrayList<>(FileEntry.mergeEntries(entries));
    DataFilePathFactories factories = new DataFilePathFactories(pathFactory);
    for (ManifestEntry entry : entries) {
        DataFilePathFactory dataFilePathFactory =
                factories.get(entry.partition(), entry.bucket());
        result.add(dataFilePathFactory.toPath(entry));
    }
    // Add 'DELETE' 'APPEND' file in snapshot
    // These 'delete' files can be merged by the plan#splits,
    // so it's not shown in the entries above.
    // In other words, these files are not used (by snapshot or changelog) now,
    // but it can only be cleaned after this snapshot expired, so we should add it to the file
    // use list.
    if (changelogDecoupled && !produceChangelog) {
        entries =
                manifestList.readDeltaManifests(snapshot).stream()
                        .flatMap(m -> manifestFile.read(m.fileName()).stream())
                        .collect(Collectors.toList());
        for (ManifestEntry entry : entries) {
            // append delete file are delayed to delete
            if (entry.kind() == FileKind.DELETE
                    && entry.file().fileSource().orElse(FileSource.APPEND)
                            == FileSource.APPEND) {
                DataFilePathFactory dataFilePathFactory =
                        factories.get(entry.partition(), entry.bucket());
                result.add(dataFilePathFactory.toPath(entry));
            }
        }
    }
    return result;
}
/**
 * Collects every file referenced by the given long-lived changelog: the changelog file
 * itself, the relevant manifest lists/manifests, and the data files they point to.
 *
 * <p>Which manifests carry useful data files depends on the changelog producer: with a
 * producer configured, the changelog manifests; without one, the APPEND files of the
 * delta manifests.
 */
private static Set<Path> getChangelogFileInUse(
        long changelogId,
        SnapshotManager snapshotManager,
        ChangelogManager changelogManager,
        FileIO fileIO,
        FileStorePathFactory pathFactory,
        ManifestList manifestList,
        ManifestFile manifestFile) {
    Set<Path> result = new HashSet<>();
    SchemaManager schemaManager = new SchemaManager(fileIO, snapshotManager.tablePath());
    CoreOptions options = new CoreOptions(schemaManager.latest().get().options());
    Path changelogPath = changelogManager.longLivedChangelogPath(changelogId);
    Changelog changelog = Changelog.fromPath(fileIO, changelogPath);
    // changelog file
    result.add(changelogPath);
    // Cache one path factory per (partition, bucket) pair.
    Map<Pair<BinaryRow, Integer>, DataFilePathFactory> dataFilePathFactoryMap = new HashMap<>();
    // data file
    // not all manifests contains useful data file
    // (1) produceChangelog = 'true': data file in changelog manifests
    // (2) produceChangelog = 'false': 'APPEND' data file in delta manifests
    // delta file
    if (options.changelogProducer() == CoreOptions.ChangelogProducer.NONE) {
        // TODO why we need to keep base manifests?
        result.add(pathFactory.toManifestListPath(changelog.baseManifestList()));
        manifestList
                .readDataManifests(changelog)
                .forEach(m -> result.add(pathFactory.toManifestFilePath(m.fileName())));
        result.add(pathFactory.toManifestListPath(changelog.deltaManifestList()));
        List<ManifestFileMeta> manifests = manifestList.readDeltaManifests(changelog);
        manifests.forEach(m -> result.add(pathFactory.toManifestFilePath(m.fileName())));
        List<ManifestEntry> files =
                manifests.stream()
                        .flatMap(m -> manifestFile.read(m.fileName()).stream())
                        .collect(Collectors.toList());
        for (ManifestEntry entry : files) {
            // Only APPEND-sourced files act as the changelog in this mode.
            if (entry.file().fileSource().orElse(FileSource.APPEND) == FileSource.APPEND) {
                Pair<BinaryRow, Integer> bucket = Pair.of(entry.partition(), entry.bucket());
                DataFilePathFactory dataFilePathFactory =
                        dataFilePathFactoryMap.computeIfAbsent(
                                bucket,
                                b ->
                                        pathFactory.createDataFilePathFactory(
                                                entry.partition(), entry.bucket()));
                result.add(dataFilePathFactory.toPath(entry));
            }
        }
    } else if (changelog.changelogManifestList() != null) {
        result.add(pathFactory.toManifestListPath(changelog.changelogManifestList()));
        List<ManifestFileMeta> manifests = manifestList.readChangelogManifests(changelog);
        manifests.forEach(m -> result.add(pathFactory.toManifestFilePath(m.fileName())));
        List<ManifestEntry> files =
                manifests.stream()
                        .flatMap(m -> manifestFile.read(m.fileName()).stream())
                        .collect(Collectors.toList());
        for (ManifestEntry entry : files) {
            Pair<BinaryRow, Integer> bucket = Pair.of(entry.partition(), entry.bucket());
            DataFilePathFactory dataFilePathFactory =
                    dataFilePathFactoryMap.computeIfAbsent(
                            bucket,
                            b ->
                                    pathFactory.createDataFilePathFactory(
                                            entry.partition(), entry.bucket()));
            result.add(dataFilePathFactory.toPath(entry));
        }
    }
    return result;
}
/** Builder of {@link TestFileStore}. */
public static class Builder {

    private final String format;
    private final String root;
    private final int numBuckets;
    private final RowType partitionType;
    private final RowType keyType;
    private final RowType valueType;
    private final KeyValueFieldsExtractor keyValueFieldsExtractor;
    private final MergeFunctionFactory<KeyValue> mfFactory;
    private final TableSchema tableSchema;

    private CoreOptions.ChangelogProducer changelogProducer;

    public Builder(
            String format,
            String root,
            int numBuckets,
            RowType partitionType,
            RowType keyType,
            RowType valueType,
            KeyValueFieldsExtractor keyValueFieldsExtractor,
            MergeFunctionFactory<KeyValue> mfFactory,
            TableSchema tableSchema) {
        this.format = format;
        this.root = root;
        this.numBuckets = numBuckets;
        this.partitionType = partitionType;
        this.keyType = keyType;
        this.valueType = valueType;
        this.keyValueFieldsExtractor = keyValueFieldsExtractor;
        this.mfFactory = mfFactory;
        this.tableSchema = tableSchema;
        this.changelogProducer = CoreOptions.ChangelogProducer.NONE;
    }

    /** Sets the changelog producer; defaults to {@code NONE}. */
    public Builder changelogProducer(CoreOptions.ChangelogProducer changelogProducer) {
        this.changelogProducer = changelogProducer;
        return this;
    }

    /** Builds the {@link TestFileStore} with options derived from the table schema. */
    public TestFileStore build() {
        Options storeOptions;
        if (tableSchema == null) {
            storeOptions = new Options();
        } else {
            storeOptions = Options.fromMap(tableSchema.options());
        }
        storeOptions.set(CoreOptions.WRITE_BUFFER_SIZE, WRITE_BUFFER_SIZE);
        storeOptions.set(CoreOptions.PAGE_SIZE, PAGE_SIZE);
        storeOptions.set(CoreOptions.TARGET_FILE_SIZE, MemorySize.parse("1 kb"));
        // Randomize the manifest target size to exercise different manifest merge paths.
        storeOptions.set(
                CoreOptions.MANIFEST_TARGET_FILE_SIZE,
                MemorySize.parse((ThreadLocalRandom.current().nextInt(16) + 1) + "kb"));
        storeOptions.set(CoreOptions.FILE_FORMAT, format);
        storeOptions.set(CoreOptions.MANIFEST_FORMAT, format);
        storeOptions.set(CoreOptions.PATH, root);
        storeOptions.set(CoreOptions.BUCKET, numBuckets);
        storeOptions.set(CoreOptions.CHANGELOG_PRODUCER, changelogProducer);
        // disable dynamic-partition-overwrite in FileStoreCommit layer test
        storeOptions.set(CoreOptions.DYNAMIC_PARTITION_OVERWRITE, false);
        return new TestFileStore(
                root,
                new CoreOptions(storeOptions),
                partitionType,
                keyType,
                valueType,
                keyValueFieldsExtractor,
                mfFactory,
                tableSchema);
    }
}
}
|
apache/pig | 36,474 | src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.mapReduceLayer;
import java.io.IOException;
import java.io.PrintStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.TaskReport;
import org.apache.hadoop.mapred.jobcontrol.Job;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.pig.PigConfiguration;
import org.apache.pig.PigException;
import org.apache.pig.PigRunner.ReturnCode;
import org.apache.pig.PigWarning;
import org.apache.pig.backend.BackendException;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.datastorage.ConfigurationUtil;
import org.apache.pig.backend.hadoop.executionengine.JobCreationException;
import org.apache.pig.backend.hadoop.executionengine.Launcher;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRCompiler.LastInputStreamingOptimizer;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.DotMRPrinter;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.EndOfAllInputSetter;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MRIntermediateDataVisitor;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MRPrinter;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.POPackageAnnotator;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.XMLMRPrinter;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.JoinPackager;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
import org.apache.pig.backend.hadoop.executionengine.shims.HadoopShims;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.PigImplConstants;
import org.apache.pig.impl.io.FileLocalizer;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.impl.plan.CompilationMessageCollector;
import org.apache.pig.impl.plan.CompilationMessageCollector.MessageType;
import org.apache.pig.impl.plan.PlanException;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.impl.util.ConfigurationValidator;
import org.apache.pig.impl.util.LogUtils;
import org.apache.pig.impl.util.UDFContext;
import org.apache.pig.impl.util.Utils;
import org.apache.pig.tools.pigstats.OutputStats;
import org.apache.pig.tools.pigstats.PigStats;
import org.apache.pig.tools.pigstats.PigStatsUtil;
import org.apache.pig.tools.pigstats.mapreduce.MRJobStats;
import org.apache.pig.tools.pigstats.mapreduce.MRPigStatsUtil;
import org.apache.pig.tools.pigstats.mapreduce.MRScriptState;
/**
* Main class that launches pig for Map Reduce
*
*/
public class MapReduceLauncher extends Launcher {
public static final String SUCCEEDED_FILE_NAME = "_SUCCESS";
private static final Log log = LogFactory.getLog(MapReduceLauncher.class);
private boolean aggregateWarning = false;
/**
 * Creates the launcher and registers a shutdown hook (with job-kill priority) so that
 * still-running map-reduce jobs are cleaned up if the JVM exits mid-script.
 */
public MapReduceLauncher() {
    super();
    Utils.addShutdownHookWithPriority(new HangingJobKiller(),
            PigImplConstants.SHUTDOWN_HOOK_JOB_KILL_PRIORITY);
}
/**
 * Kills all currently running Hadoop jobs launched for this script. Best-effort: if
 * killing one job throws, the wrapped exception aborts the loop and is only logged as a
 * warning (matching the original behavior).
 */
@Override
public void kill() {
    try {
        if (jc != null && jc.getRunningJobs().size() > 0) {
            log.info("Received kill signal");
            // FIX: hoist the formatter out of the loop; SimpleDateFormat construction is
            // expensive and the instance is confined to this thread, so reuse is safe.
            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            for (Job job : jc.getRunningJobs()) {
                org.apache.hadoop.mapreduce.Job mrJob = job.getJob();
                try {
                    if (mrJob != null) {
                        mrJob.killJob();
                    }
                } catch (Exception ir) {
                    throw new IOException(ir);
                }
                log.info("Job " + job.getAssignedJobID() + " killed");
                String timeStamp = dateFormat.format(Calendar.getInstance().getTime());
                System.err.println(timeStamp + " Job " + job.getAssignedJobID() + " killed");
            }
        }
    } catch (Exception e) {
        log.warn("Encounter exception on cleanup:" + e);
    }
}
/**
 * Kills the Hadoop job with the given id using a JobClient built from {@code conf}.
 * Prints a message if no active job with that id exists; does nothing when {@code conf}
 * is null.
 *
 * @param jobID the Hadoop job id to kill
 * @param conf configuration used to reach the cluster; may be null
 * @throws BackendException if communicating with the cluster fails
 */
@Override
public void killJob(String jobID, Configuration conf) throws BackendException {
    if (conf == null) {
        return;
    }
    try {
        JobClient jobClient = new JobClient(new JobConf(conf));
        JobID id = JobID.forName(jobID);
        RunningJob job = jobClient.getJob(id);
        if (job == null) {
            System.out.println("Job with id " + jobID + " is not active");
        } else {
            job.killJob();
            log.info("Kill " + id + " submitted.");
        }
    } catch (IOException e) {
        throw new BackendException(e);
    }
}
/**
 * Get the exception that caused a failure on the backend for a
 * store location (if any).
 *
 * @param spec the store location to look up
 * @return the backend exception recorded for that store location, or null if none
 */
public Exception getError(FileSpec spec) {
    return failureMap.get(spec);
}
@Override
public PigStats launchPig(PhysicalPlan php,
String grpName,
PigContext pc) throws PlanException,
VisitorException,
IOException,
ExecException,
JobCreationException,
Exception {
long sleepTime = 500;
aggregateWarning = Boolean.valueOf(pc.getProperties().getProperty("aggregate.warning"));
MROperPlan mrp = compile(php, pc);
addGCParams(pc.getProperties(), JobConf.MAPRED_TASK_JAVA_OPTS, false);
addGCParams(pc.getProperties(), JobConf.MAPRED_MAP_TASK_JAVA_OPTS, true);
addGCParams(pc.getProperties(), JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, true);
ConfigurationValidator.validatePigProperties(pc.getProperties());
Configuration conf = ConfigurationUtil.toConfiguration(pc.getProperties());
MRExecutionEngine exe = (MRExecutionEngine) pc.getExecutionEngine();
Properties defaultProperties = new Properties();
JobConf defaultJobConf = exe.getLocalConf();
Utils.recomputeProperties(defaultJobConf, defaultProperties);
// This is a generic JobClient for checking progress of the jobs
JobClient statsJobClient = new JobClient(exe.getJobConf());
JobControlCompiler jcc = new JobControlCompiler(pc, conf, ConfigurationUtil.toConfiguration(defaultProperties));
MRScriptState.get().addWorkflowAdjacenciesToConf(mrp, conf);
// start collecting statistics
PigStats.start(pc.getExecutionEngine().instantiatePigStats());
MRPigStatsUtil.startCollection(pc, statsJobClient, jcc, mrp);
// Find all the intermediate data stores. The plan will be destroyed during compile/execution
// so this needs to be done before.
MRIntermediateDataVisitor intermediateVisitor = new MRIntermediateDataVisitor(mrp);
intermediateVisitor.visit();
List<Job> failedJobs = new LinkedList<Job>();
List<NativeMapReduceOper> failedNativeMR = new LinkedList<NativeMapReduceOper>();
List<Job> completeFailedJobsInThisRun = new LinkedList<Job>();
List<Job> succJobs = new LinkedList<Job>();
int totalMRJobs = mrp.size();
int numMRJobsCompl = 0;
double lastProg = -1;
long scriptSubmittedTimestamp = System.currentTimeMillis();
//create the exception handler for the job control thread
//and register the handler with the job control thread
JobControlThreadExceptionHandler jctExceptionHandler = new JobControlThreadExceptionHandler();
boolean stop_on_failure =
Boolean.valueOf(pc.getProperties().getProperty("stop.on.failure", "false"));
boolean stoppedOnFailure = false;
// jc is null only when mrp.size == 0
while(mrp.size() != 0 && !stoppedOnFailure) {
jc = jcc.compile(mrp, grpName);
if(jc == null) {
List<MapReduceOper> roots = new LinkedList<MapReduceOper>();
roots.addAll(mrp.getRoots());
// run the native mapreduce roots first then run the rest of the roots
for(MapReduceOper mro: roots) {
if(mro instanceof NativeMapReduceOper) {
NativeMapReduceOper natOp = (NativeMapReduceOper)mro;
try {
MRScriptState.get().emitJobsSubmittedNotification(1);
natOp.runJob();
numMRJobsCompl++;
} catch (IOException e) {
mrp.trimBelow(natOp);
failedNativeMR.add(natOp);
String msg = "Error running native mapreduce" +
" operator job :" + natOp.getJobId() + e.getMessage();
String stackTrace = Utils.getStackStraceStr(e);
LogUtils.writeLog(msg,
stackTrace,
pc.getProperties().getProperty("pig.logfile"),
log
);
log.info(msg);
if (stop_on_failure) {
int errCode = 6017;
throw new ExecException(msg, errCode,
PigException.REMOTE_ENVIRONMENT);
}
}
double prog = ((double)numMRJobsCompl)/totalMRJobs;
notifyProgress(prog, lastProg);
lastProg = prog;
mrp.remove(natOp);
}
}
continue;
}
// Initially, all jobs are in wait state.
List<Job> jobsWithoutIds = jc.getWaitingJobs();
log.info(jobsWithoutIds.size() +" map-reduce job(s) waiting for submission.");
//notify listeners about jobs submitted
MRScriptState.get().emitJobsSubmittedNotification(jobsWithoutIds.size());
// update Pig stats' job DAG with just compiled jobs
MRPigStatsUtil.updateJobMroMap(jcc.getJobMroMap());
// determine job tracker url
String jobTrackerLoc;
JobConf jobConf = jobsWithoutIds.get(0).getJobConf();
try {
String port = jobConf.get(MRConfiguration.JOB_TRACKER_HTTP_ADDRESS);
String jobTrackerAdd = jobConf.get(MRConfiguration.JOB_TRACKER);
jobTrackerLoc = jobTrackerAdd.substring(0,jobTrackerAdd.indexOf(":"))
+ port.substring(port.indexOf(":"));
}
catch(Exception e){
// Could not get the job tracker location, most probably we are running in local mode.
// If it is the case, we don't print out job tracker location,
// because it is meaningless for local mode.
jobTrackerLoc = null;
log.debug("Failed to get job tracker location.");
}
completeFailedJobsInThisRun.clear();
// Set the thread UDFContext so registered classes are available.
final UDFContext udfContext = UDFContext.getUDFContext();
Thread jcThread = new Thread(jc, "JobControl") {
@Override
public void run() {
UDFContext.setUdfContext(udfContext.clone()); //PIG-2576
super.run();
}
};
jcThread.setUncaughtExceptionHandler(jctExceptionHandler);
jcThread.setContextClassLoader(PigContext.getClassLoader());
// mark the times that the jobs were submitted so it's reflected in job history props
for (Job job : jc.getWaitingJobs()) {
JobConf jobConfCopy = job.getJobConf();
jobConfCopy.set("pig.script.submitted.timestamp",
Long.toString(scriptSubmittedTimestamp));
jobConfCopy.set("pig.job.submitted.timestamp",
Long.toString(System.currentTimeMillis()));
job.setJobConf(jobConfCopy);
}
//All the setup done, now lets launch the jobs.
jcThread.start();
try {
// a flag whether to warn failure during the loop below, so users can notice failure earlier.
boolean warn_failure = true;
// Now wait, till we are finished.
while(!jc.allFinished()){
jcThread.join(sleepTime);
List<Job> jobsAssignedIdInThisRun = new ArrayList<Job>();
for(Job job : jobsWithoutIds){
if (job.getAssignedJobID() != null){
jobsAssignedIdInThisRun.add(job);
log.info("HadoopJobId: "+job.getAssignedJobID());
// display the aliases being processed
MapReduceOper mro = jcc.getJobMroMap().get(job);
if (mro != null) {
String alias = MRScriptState.get().getAlias(mro);
log.info("Processing aliases " + alias);
String aliasLocation = MRScriptState.get().getAliasLocation(mro);
log.info("detailed locations: " + aliasLocation);
}
// update statistics for this job so jobId is set
MRPigStatsUtil.addJobStats(job);
MRScriptState.get().emitJobStartedNotification(
job.getAssignedJobID().toString());
}
else{
// This job is not assigned an id yet.
}
}
jobsWithoutIds.removeAll(jobsAssignedIdInThisRun);
double prog = (numMRJobsCompl+calculateProgress(jc))/totalMRJobs;
if (notifyProgress(prog, lastProg)) {
List<Job> runnJobs = jc.getRunningJobs();
if (runnJobs != null) {
StringBuilder msg = new StringBuilder();
for (Object object : runnJobs) {
Job j = (Job) object;
if (j != null) {
msg.append(j.getAssignedJobID()).append(",");
}
}
if (msg.length() > 0) {
msg.setCharAt(msg.length() - 1, ']');
log.info("Running jobs are [" + msg);
}
}
lastProg = prog;
}
// collect job stats by frequently polling of completed jobs (PIG-1829)
MRPigStatsUtil.accumulateStats(jc);
// if stop_on_failure is enabled, we need to stop immediately when any job has failed
stoppedOnFailure = stopJobsOnFailure(stop_on_failure);
// otherwise, we just display a warning message if there's any failure
if (!stop_on_failure && warn_failure && !jc.getFailedJobs().isEmpty()) {
// we don't warn again for this group of jobs
warn_failure = false;
log.warn("Ooops! Some job has failed! Specify -stop_on_failure if you "
+ "want Pig to stop immediately on failure.");
}
}
//check for the jobControlException first
//if the job controller fails before launching the jobs then there are
//no jobs to check for failure
if (jobControlException != null) {
if (jobControlException instanceof PigException) {
if (jobControlExceptionStackTrace != null) {
LogUtils.writeLog("Error message from job controller",
jobControlExceptionStackTrace, pc
.getProperties().getProperty(
"pig.logfile"), log);
}
throw jobControlException;
} else {
int errCode = 2117;
String msg = "Unexpected error when launching map reduce job.";
throw new ExecException(msg, errCode, PigException.BUG,
jobControlException);
}
}
if (!jc.getFailedJobs().isEmpty() ) {
// stop if stop_on_failure is enabled
stoppedOnFailure = stopJobsOnFailure(stop_on_failure);
if (!stoppedOnFailure) {
// If we only have one store and that job fail, then we sure
// that the job completely fail, and we shall stop dependent jobs
for (Job job : jc.getFailedJobs()) {
completeFailedJobsInThisRun.add(job);
log.info("job " + job.getAssignedJobID() + " has failed! Stop running all dependent jobs");
}
}
failedJobs.addAll(jc.getFailedJobs());
}
int removedMROp = jcc.updateMROpPlan(completeFailedJobsInThisRun);
numMRJobsCompl += removedMROp;
List<Job> jobs = jc.getSuccessfulJobs();
jcc.moveResults(jobs);
succJobs.addAll(jobs);
// collecting final statistics
MRPigStatsUtil.accumulateStats(jc);
}
catch (Exception e) {
throw e;
}
finally {
jc.stop();
}
}
MRScriptState.get().emitProgressUpdatedNotification(100);
log.info( "100% complete");
boolean failed = false;
if(failedNativeMR.size() > 0){
failed = true;
}
if (Boolean.valueOf(pc.getProperties().getProperty(PigConfiguration.PIG_DELETE_TEMP_FILE, "true"))) {
// Clean up all the intermediate data
for (String path : intermediateVisitor.getIntermediate()) {
// Skip non-file system paths such as hbase, see PIG-3617
if (HadoopShims.hasFileSystemImpl(new Path(path), conf)) {
FileLocalizer.delete(path, pc);
}
}
}
// Look to see if any jobs failed. If so, we need to report that.
if (failedJobs != null && failedJobs.size() > 0) {
Exception backendException = null;
for (Job fj : failedJobs) {
try {
getStats(fj, true, pc);
} catch (Exception e) {
backendException = e;
}
List<POStore> sts = jcc.getStores(fj);
for (POStore st: sts) {
failureMap.put(st.getSFile(), backendException);
}
MRPigStatsUtil.setBackendException(fj, backendException);
}
failed = true;
}
// stats collection is done, log the results
MRPigStatsUtil.stopCollection(true);
// PigStatsUtil.stopCollection also computes the return code based on
// total jobs to run, jobs successful and jobs failed
failed = failed || !PigStats.get().isSuccessful();
Map<Enum, Long> warningAggMap = new HashMap<Enum, Long>();
if (succJobs != null) {
for (Job job : succJobs) {
List<POStore> sts = jcc.getStores(job);
for (POStore st : sts) {
if (!st.isTmpStore()) {
// create an "_SUCCESS" file in output location if
// output location is a filesystem dir
createSuccessFile(job, st);
} else {
log.debug("Successfully stored result in: \""
+ st.getSFile().getFileName() + "\"");
}
}
getStats(job, false, pc);
if (aggregateWarning) {
computeWarningAggregate(job, warningAggMap);
}
}
}
if(aggregateWarning) {
CompilationMessageCollector.logAggregate(warningAggMap, MessageType.Warning, log) ;
}
if (!failed) {
log.info("Success!");
} else {
if (succJobs != null && succJobs.size() > 0) {
log.info("Some jobs have failed! Stop running all dependent jobs");
} else {
log.info("Failed!");
}
}
jcc.reset();
int ret = failed ? ((succJobs != null && succJobs.size() > 0)
? ReturnCode.PARTIAL_FAILURE
: ReturnCode.FAILURE)
: ReturnCode.SUCCESS;
PigStats pigStats = PigStatsUtil.getPigStats(ret);
// run cleanup for all of the stores
for (OutputStats output : pigStats.getOutputStats()) {
POStore store = output.getPOStore();
try {
if (!output.isSuccessful()) {
store.getStoreFunc().cleanupOnFailure(
store.getSFile().getFileName(),
new org.apache.hadoop.mapreduce.Job(output.getConf()));
} else {
store.getStoreFunc().cleanupOnSuccess(
store.getSFile().getFileName(),
new org.apache.hadoop.mapreduce.Job(output.getConf()));
}
} catch (IOException e) {
throw new ExecException(e);
} catch (AbstractMethodError nsme) {
// Just swallow it. This means we're running against an
// older instance of a StoreFunc that doesn't implement
// this method.
}
}
if (stoppedOnFailure) {
throw new ExecException("Stopping execution on job failure with -stop_on_failure option", 6017,
PigException.REMOTE_ENVIRONMENT);
}
return pigStats;
}
/**
 * Fails all ready and running jobs when -stop_on_failure is enabled and at
 * least one job has already failed.
 *
 * @param stop_on_failure whether the -stop_on_failure option is in effect
 * @return true if there were failed jobs and stop_on_failure is enabled
 */
private boolean stopJobsOnFailure(boolean stop_on_failure) throws IOException, InterruptedException {
    if (jc.getFailedJobs().isEmpty() || !stop_on_failure) {
        // Nothing failed yet, or the user asked Pig to keep going.
        return false;
    }
    List<ControlledJob> ready = jc.getReadyJobsList();
    List<ControlledJob> running = jc.getRunningJobList();
    if (!ready.isEmpty() || !running.isEmpty()) {
        log.info("Some job(s) failed. Failing other ready and running jobs as -stop_on_failure is on");
        for (ControlledJob readyJob : ready) {
            readyJob.failJob("Failing ready job for -stop_on_failure: " + readyJob.getMapredJobId());
        }
        for (ControlledJob runningJob : running) {
            runningJob.failJob("Failing running job for -stop_on_failure: " + runningJob.getMapredJobId());
        }
    }
    return true;
}
/**
 * Logs progress and notifies listeners if progress has advanced by at
 * least 4% since the last report.
 *
 * @param prog current progress in [0, 1]
 * @param lastProg progress at the previous report
 * @return true if the delta was large enough to report
 */
private boolean notifyProgress(double prog, double lastProg) {
    boolean shouldReport = prog >= (lastProg + 0.04);
    if (shouldReport) {
        int percent = (int) (prog * 100);
        // The final 100% message is emitted by the caller once all jobs finish,
        // so skip it here to avoid a duplicate notification.
        if (percent != 100) {
            log.info(percent + "% complete");
            MRScriptState.get().emitProgressUpdatedNotification(percent);
        }
    }
    return shouldReport;
}
/**
 * Explains the given physical plan by compiling it to a MapReduce operator
 * plan and rendering that plan on the supplied stream.
 *
 * @param php the physical plan to explain
 * @param pc the Pig context used for compilation
 * @param ps stream the explanation is written to
 * @param format "text", "xml", or any other value for DOT graph output
 * @param verbose whether to include operator detail (text/DOT only)
 */
@Override
public void explain(
        PhysicalPlan php,
        PigContext pc,
        PrintStream ps,
        String format,
        boolean verbose) throws PlanException, VisitorException,
                                IOException {
    log.trace("Entering MapReduceLauncher.explain");
    MROperPlan mrp = compile(php, pc);
    if (format.equals("text")) {
        MRPrinter printer = new MRPrinter(ps, mrp);
        printer.setVerbose(verbose);
        printer.visit();
    } else if (format.equals("xml")) {
        try {
            XMLMRPrinter printer = new XMLMRPrinter(ps, mrp);
            printer.visit();
            printer.closePlan();
        } catch (ParserConfigurationException e) {
            e.printStackTrace();
        } catch (TransformerException e) {
            e.printStackTrace();
        }
    } else {
        // Any other format value falls through to DOT graph output.
        ps.println("#--------------------------------------------------");
        ps.println("# Map Reduce Plan ");
        ps.println("#--------------------------------------------------");
        DotMRPrinter printer =new DotMRPrinter(mrp, ps);
        printer.setVerbose(verbose);
        printer.dump();
        ps.println("");
    }
}
/**
 * Compiles a physical plan into a MapReduce operator plan and runs the full
 * sequence of MR-plan optimizers over it.
 *
 * <p>NOTE(review): the passes below are order-dependent (e.g. NoopStoreRemover
 * must run after MultiQueryOptimizer and NoopFilterRemover; EndOfAllInputSetter
 * after MultiQueryOptimizer) — do not reorder.
 *
 * @param php the physical plan to compile
 * @param pc the Pig context supplying configuration properties
 * @return the optimized MapReduce operator plan
 */
public MROperPlan compile(
        PhysicalPlan php,
        PigContext pc) throws PlanException, IOException, VisitorException {
    MRCompiler comp = new MRCompiler(php, pc);
    comp.compile();
    comp.aggregateScalarsFiles();
    comp.connectSoftLink();
    MROperPlan plan = comp.getMRPlan();
    //display the warning message(s) from the MRCompiler
    comp.getMessageCollector().logMessages(MessageType.Warning, aggregateWarning, log);
    String lastInputChunkSize =
        pc.getProperties().getProperty(
                "last.input.chunksize", JoinPackager.DEFAULT_CHUNK_SIZE);
    // Combiner is enabled unless the user opted out (or we are illustrating).
    String prop = pc.getProperties().getProperty(PigConfiguration.PIG_EXEC_NO_COMBINER);
    if (!pc.inIllustrator && !("true".equals(prop))) {
        boolean doMapAgg =
                Boolean.valueOf(pc.getProperties().getProperty(PigConfiguration.PIG_EXEC_MAP_PARTAGG,"false"));
        CombinerOptimizer co = new CombinerOptimizer(plan, doMapAgg);
        co.visit();
        //display the warning message(s) from the CombinerOptimizer
        co.getMessageCollector().logMessages(MessageType.Warning, aggregateWarning, log);
    }
    // Optimize the jobs that have a load/store only first MR job followed
    // by a sample job.
    SampleOptimizer so = new SampleOptimizer(plan, pc);
    so.visit();
    // We must ensure that there is only 1 reducer for a limit. Add a single-reducer job.
    if (!pc.inIllustrator) {
        LimitAdjuster la = new LimitAdjuster(plan, pc);
        la.visit();
        la.adjust();
    }
    // Optimize to use secondary sort key if possible
    prop = pc.getProperties().getProperty(PigConfiguration.PIG_EXEC_NO_SECONDARY_KEY);
    if (!pc.inIllustrator && !("true".equals(prop))) {
        SecondaryKeyOptimizerMR skOptimizer = new SecondaryKeyOptimizerMR(plan);
        skOptimizer.visit();
    }
    // optimize key - value handling in package
    POPackageAnnotator pkgAnnotator = new POPackageAnnotator(plan);
    pkgAnnotator.visit();
    // optimize joins
    LastInputStreamingOptimizer liso =
        new MRCompiler.LastInputStreamingOptimizer(plan, lastInputChunkSize);
    liso.visit();
    // figure out the type of the key for the map plan
    // this is needed when the key is null to create
    // an appropriate NullableXXXWritable object
    KeyTypeDiscoveryVisitor kdv = new KeyTypeDiscoveryVisitor(plan);
    kdv.visit();
    // removes the filter(constant(true)) operators introduced by
    // splits.
    NoopFilterRemover fRem = new NoopFilterRemover(plan);
    fRem.visit();
    boolean isMultiQuery =
        Boolean.valueOf(pc.getProperties().getProperty(PigConfiguration.PIG_OPT_MULTIQUERY, "true"));
    if (isMultiQuery) {
        // reduces the number of MROpers in the MR plan generated
        // by multi-query (multi-store) script.
        MultiQueryOptimizer mqOptimizer = new MultiQueryOptimizer(plan, pc.inIllustrator);
        mqOptimizer.visit();
    }
    // removes unnecessary stores (as can happen with splits in
    // some cases.). This has to run after the MultiQuery and
    // NoopFilterRemover.
    NoopStoreRemover sRem = new NoopStoreRemover(plan);
    sRem.visit();
    // check whether stream operator is present
    // after MultiQueryOptimizer because it can shift streams from
    // map to reduce, etc.
    EndOfAllInputSetter checker = new EndOfAllInputSetter(plan);
    checker.visit();
    boolean isAccum =
        Boolean.valueOf(pc.getProperties().getProperty("opt.accumulator","true"));
    if (isAccum) {
        AccumulatorOptimizer accum = new AccumulatorOptimizer(plan);
        accum.visit();
    }
    return plan;
}
/**
 * Returns whether Hadoop's FileOutputCommitter "mark successful jobs"
 * switch is on for this job; defaults to false when the property is absent.
 */
private boolean shouldMarkOutputDir(Job job) {
    boolean markSuccess = job.getJobConf().getBoolean(
            MRConfiguration.FILEOUTPUTCOMMITTER_MARKSUCCESSFULJOBS, false);
    return markSuccess;
}
/**
 * Creates an empty _SUCCESS marker file in the store's output directory when
 * the job is configured to mark successful output and the output path lives
 * on a real file system.
 *
 * @param job the successfully completed job
 * @param store the store whose output directory should be marked
 */
private void createSuccessFile(Job job, POStore store) throws IOException {
    if (!shouldMarkOutputDir(job)) {
        return;
    }
    Path outputPath = new Path(store.getSFile().getFileName());
    String scheme = outputPath.toUri().getScheme();
    if (!HadoopShims.hasFileSystemImpl(outputPath, job.getJobConf())) {
        // Non-filesystem outputs (e.g. hbase) cannot hold a marker file.
        log.warn("No FileSystem for scheme: " + scheme + ". Not creating success file");
        return;
    }
    FileSystem fs = outputPath.getFileSystem(job.getJobConf());
    if (fs.exists(outputPath)) {
        // create a file in the folder to mark it
        Path marker = new Path(outputPath, SUCCEEDED_FILE_NAME);
        if (!fs.exists(marker)) {
            fs.create(marker).close();
        }
    }
}
/**
 * Folds the warning counters of one job into the running aggregate map.
 *
 * @param job the job whose counters should be accumulated
 * @param aggMap aggregate counts keyed by {@code PigWarning}; updated in place
 */
@SuppressWarnings("deprecation")
void computeWarningAggregate(Job job, Map<Enum, Long> aggMap) {
    try {
        Counters counters = MRJobStats.getCounters(job);
        if (counters == null)
        {
            // Track how many jobs came back without counters so the final
            // report can note that warning totals may be incomplete.
            long nullCounterCount =
                    (aggMap.get(PigWarning.NULL_COUNTER_COUNT) == null)
                    ? 0
                    : aggMap.get(PigWarning.NULL_COUNTER_COUNT);
            nullCounterCount++;
            aggMap.put(PigWarning.NULL_COUNTER_COUNT, nullCounterCount);
        }
        for (Enum e : PigWarning.values()) {
            if (e != PigWarning.NULL_COUNTER_COUNT) {
                Long currentCount = aggMap.get(e);
                currentCount = (currentCount == null ? 0 : currentCount);
                // This code checks if the counters is null, if it is,
                // we need to report to the user that the number
                // of warning aggregations may not be correct. In fact,
                // Counters should not be null, it is
                // a hadoop bug, once this bug is fixed in hadoop, the
                // null handling code should never be hit.
                // See Pig-943
                if (counters != null)
                    currentCount += counters.getCounter(e);
                aggMap.put(e, currentCount);
            }
        }
    } catch (Exception e) {
        // Fix: pass the exception to the logger instead of swallowing it,
        // so the cause of the failed counter retrieval is preserved.
        String msg = "Unable to retrieve job to compute warning aggregation.";
        log.warn(msg, e);
    }
}
/**
 * Collects diagnostics for a finished job: recreates and rethrows the backend
 * exception for jobs that never got submitted, otherwise gathers task error
 * messages and accumulates time spent in Hadoop.
 *
 * @param job the job to inspect
 * @param errNotDbg true to surface task errors at ERROR level, false for DEBUG
 * @param pigContext context supplying the pig.logfile property
 * @throws ExecException when the job failed at submission or stats retrieval
 *         failed for an unsuccessful job
 */
private void getStats(Job job, boolean errNotDbg,
        PigContext pigContext) throws ExecException {
    JobID MRJobID = job.getAssignedJobID();
    String jobMessage = job.getMessage();
    Exception backendException = null;
    // A null job ID means the job never launched; the failure message from
    // submission is all we have to work with.
    if (MRJobID == null) {
        try {
            LogUtils.writeLog(
                    "Backend error message during job submission",
                    jobMessage,
                    pigContext.getProperties().getProperty("pig.logfile"),
                    log);
            backendException = getExceptionFromString(jobMessage);
        } catch (Exception e) {
            // The message did not contain a parseable stack trace.
            int errCode = 2997;
            String msg = "Unable to recreate exception from backend error: "
                    + jobMessage;
            throw new ExecException(msg, errCode, PigException.BUG);
        }
        throw new ExecException(backendException);
    }
    try {
        Iterator<TaskReport> mapRep = MRJobStats.getTaskReports(job, TaskType.MAP);
        if (mapRep != null) {
            getErrorMessages(mapRep, "map", errNotDbg, pigContext);
            totalHadoopTimeSpent += computeTimeSpent(mapRep);
            mapRep = null;
        }
        Iterator<TaskReport> redRep = MRJobStats.getTaskReports(job, TaskType.REDUCE);
        if (redRep != null) {
            getErrorMessages(redRep, "reduce", errNotDbg, pigContext);
            totalHadoopTimeSpent += computeTimeSpent(redRep);
            redRep = null;
        }
    } catch (IOException e) {
        if (job.getState() == Job.SUCCESS) {
            // if the job succeeded, let the user know that
            // we were unable to get statistics
            log.warn("Unable to get job related diagnostics");
        } else {
            throw new ExecException(e);
        }
    } catch (Exception e) {
        throw new ExecException(e);
    }
}
}
|
google/j2objc | 36,385 | jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/TimeZoneNamesImpl.java | /* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/*
*******************************************************************************
* Copyright (C) 2011-2016, International Business Machines Corporation and
* others. All Rights Reserved.
*******************************************************************************
*/
package android.icu.impl;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.MissingResourceException;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import android.icu.impl.TextTrieMap.ResultHandler;
import android.icu.text.TimeZoneNames;
import android.icu.util.TimeZone;
import android.icu.util.TimeZone.SystemTimeZoneType;
import android.icu.util.ULocale;
import android.icu.util.UResourceBundle;
/**
* The standard ICU implementation of TimeZoneNames
* @hide Only a subset of ICU is exposed in Android
*/
public class TimeZoneNamesImpl extends TimeZoneNames {
private static final long serialVersionUID = -2179814848495897472L;
// Resource key of the zoneStrings table in the per-locale zone bundle.
private static final String ZONE_STRINGS_BUNDLE = "zoneStrings";
// Resource-key prefix that marks meta zone entries inside zoneStrings.
private static final String MZ_PREFIX = "meta:";
// Lazily built set of all known meta zone IDs; volatile so the
// double-checked locking in _getAvailableMetaZoneIDs() is safe.
private static volatile Set<String> METAZONE_IDS;
// Process-wide caches of zone->metazone and metazone->zone mappings.
private static final TZ2MZsCache TZ_TO_MZS_CACHE = new TZ2MZsCache();
private static final MZ2TZsCache MZ_TO_TZS_CACHE = new MZ2TZsCache();
// Locale-specific zoneStrings bundle; rebuilt by initialize()/readObject().
private transient ICUResourceBundle _zoneStrings;
// These are hard cache. We create only one TimeZoneNamesImpl per locale
// and it's stored in SoftCache, so we do not need to worry about the
// footprint much.
private transient ConcurrentHashMap<String, ZNames> _mzNamesMap;
private transient ConcurrentHashMap<String, ZNames> _tzNamesMap;
private transient boolean _namesFullyLoaded;
// Trie used for parsing display names; populated incrementally by find().
private transient TextTrieMap<NameInfo> _namesTrie;
private transient boolean _namesTrieFullyLoaded;
/**
 * Constructs an instance for the given locale and preloads the display
 * names of the default time zone.
 *
 * @param locale the display locale
 */
public TimeZoneNamesImpl(ULocale locale) {
    initialize(locale);
}
/* (non-Javadoc)
 * @see android.icu.text.TimeZoneNames#getAvailableMetaZoneIDs()
 */
// Delegates to the static helper backed by the shared metaZones bundle.
@Override
public Set<String> getAvailableMetaZoneIDs() {
    return _getAvailableMetaZoneIDs();
}
/**
 * Returns the (unmodifiable) set of all meta zone IDs, loading it from the
 * metaZones resource bundle on first use. Uses double-checked locking;
 * METAZONE_IDS is declared volatile.
 */
static Set<String> _getAvailableMetaZoneIDs() {
    if (METAZONE_IDS == null) {
        synchronized (TimeZoneNamesImpl.class) {
            if (METAZONE_IDS == null) {
                UResourceBundle metaZonesBundle =
                        UResourceBundle.getBundleInstance(ICUData.ICU_BASE_NAME, "metaZones");
                UResourceBundle mapTimezones = metaZonesBundle.get("mapTimezones");
                METAZONE_IDS = Collections.unmodifiableSet(mapTimezones.keySet());
            }
        }
    }
    return METAZONE_IDS;
}
/* (non-Javadoc)
 * @see android.icu.text.TimeZoneNames#getAvailableMetaZoneIDs(java.lang.String)
 */
// Delegates to the static helper backed by the shared zone->metazone cache.
@Override
public Set<String> getAvailableMetaZoneIDs(String tzID) {
    return _getAvailableMetaZoneIDs(tzID);
}
/**
 * Returns all meta zone IDs the given time zone has ever belonged to,
 * or an empty set when the zone is unknown or has no metazone mapping.
 */
static Set<String> _getAvailableMetaZoneIDs(String tzID) {
    if (tzID == null || tzID.length() == 0) {
        return Collections.emptySet();
    }
    List<MZMapEntry> mappings = TZ_TO_MZS_CACHE.getInstance(tzID, tzID);
    if (mappings.isEmpty()) {
        return Collections.emptySet();
    }
    Set<String> result = new HashSet<String>(mappings.size());
    for (MZMapEntry mapping : mappings) {
        result.add(mapping.mzID());
    }
    // Unmodifiable per the API contract; results may be cached in the future.
    return Collections.unmodifiableSet(result);
}
/* (non-Javadoc)
 * @see android.icu.text.TimeZoneNames#getMetaZoneID(java.lang.String, long)
 */
// Delegates to the static helper that scans the zone's metazone history.
@Override
public String getMetaZoneID(String tzID, long date) {
    return _getMetaZoneID(tzID, date);
}
/**
 * Returns the meta zone ID the given time zone belonged to at the given
 * time, or null if the zone is unknown or had no metazone at that moment.
 */
static String _getMetaZoneID(String tzID, long date) {
    if (tzID == null || tzID.length() == 0) {
        return null;
    }
    // Scan the zone's metazone history for the interval containing 'date'
    // (intervals are [from, to)).
    for (MZMapEntry entry : TZ_TO_MZS_CACHE.getInstance(tzID, tzID)) {
        if (date >= entry.from() && date < entry.to()) {
            return entry.mzID();
        }
    }
    return null;
}
/* (non-Javadoc)
 * @see android.icu.text.TimeZoneNames#getReferenceZoneID(java.lang.String, java.lang.String)
 */
// Delegates to the static helper backed by the metazone->zones cache.
@Override
public String getReferenceZoneID(String mzID, String region) {
    return _getReferenceZoneID(mzID, region);
}
/**
 * Returns the reference time zone of the meta zone for the given region,
 * falling back to the world region ("001") default, or null when unknown.
 */
static String _getReferenceZoneID(String mzID, String region) {
    if (mzID == null || mzID.length() == 0) {
        return null;
    }
    Map<String, String> regionToZone = MZ_TO_TZS_CACHE.getInstance(mzID, mzID);
    if (regionToZone.isEmpty()) {
        return null;
    }
    String zoneID = regionToZone.get(region);
    // "001" (the world) supplies the metazone's default reference zone.
    return (zoneID != null) ? zoneID : regionToZone.get("001");
}
/*
 * (non-Javadoc)
 * @see android.icu.text.TimeZoneNames#getMetaZoneDisplayName(java.lang.String, android.icu.text.TimeZoneNames.NameType)
 */
// Returns null for an empty ID; otherwise loads (and caches) the metazone's
// names and returns the one of the requested type, which may also be null.
@Override
public String getMetaZoneDisplayName(String mzID, NameType type) {
    if (mzID == null || mzID.length() == 0) {
        return null;
    }
    return loadMetaZoneNames(mzID).getName(type);
}
/*
 * (non-Javadoc)
 * @see android.icu.text.TimeZoneNames#getTimeZoneDisplayName(java.lang.String, android.icu.text.TimeZoneNames.NameType)
 */
// Returns null for an empty ID; otherwise loads (and caches) the zone's
// names and returns the one of the requested type, which may also be null.
@Override
public String getTimeZoneDisplayName(String tzID, NameType type) {
    if (tzID == null || tzID.length() == 0) {
        return null;
    }
    return loadTimeZoneNames(tzID).getName(type);
}
/**
 * Returns the exemplar location (city) name for the given time zone, or
 * null when the ID is empty or no location name is available.
 *
 * @see android.icu.text.TimeZoneNames#getExemplarLocationName(java.lang.String)
 */
@Override
public String getExemplarLocationName(String tzID) {
    if (tzID == null || tzID.length() == 0) {
        return null;
    }
    return loadTimeZoneNames(tzID).getName(NameType.EXEMPLAR_LOCATION);
}
/* (non-Javadoc)
 * @see android.icu.text.TimeZoneNames#find(java.lang.CharSequence, int, java.util.Set)
 */
// Three-phase incremental lookup: the parsing trie is expensive to fill, so
// we search it as-is first, then after adding the names loaded so far, and
// only as a last resort after loading every display name for the locale.
@Override
public synchronized Collection<MatchInfo> find(CharSequence text, int start, EnumSet<NameType> nameTypes) {
    if (text == null || text.length() == 0 || start < 0 || start >= text.length()) {
        throw new IllegalArgumentException("bad input text or range");
    }
    NameSearchHandler handler = new NameSearchHandler(nameTypes);
    Collection<MatchInfo> matches;
    // First try of lookup.
    matches = doFind(handler, text, start);
    if (matches != null) {
        return matches;
    }
    // All names are not yet loaded into the trie.
    // We may have loaded names for formatting several time zones,
    // and might be parsing one of those.
    // Populate the parsing trie from all of the already-loaded names.
    addAllNamesIntoTrie();
    // Second try of lookup.
    matches = doFind(handler, text, start);
    if (matches != null) {
        return matches;
    }
    // There are still some names we haven't loaded into the trie yet.
    // Load everything now.
    internalLoadAllDisplayNames();
    // Set default time zone location names
    // for time zones without explicit display names.
    // TODO: Should this logic be moved into internalLoadAllDisplayNames?
    Set<String> tzIDs = TimeZone.getAvailableIDs(SystemTimeZoneType.CANONICAL, null, null);
    for (String tzID : tzIDs) {
        if (!_tzNamesMap.containsKey(tzID)) {
            ZNames.createTimeZoneAndPutInCache(_tzNamesMap, null, tzID);
        }
    }
    addAllNamesIntoTrie();
    _namesTrieFullyLoaded = true;
    // Third try: we must return this one.
    return doFind(handler, text, start);
}
/**
 * Runs one trie search. Returns the matches when they are authoritative
 * (either the longest match consumed the rest of the input, or the trie is
 * fully populated); otherwise returns null so the caller can load more
 * names and retry.
 */
private Collection<MatchInfo> doFind(NameSearchHandler handler, CharSequence text, int start) {
    handler.resetResults();
    _namesTrie.find(text, start, handler);
    boolean longestPossibleMatch = handler.getMaxMatchLen() == (text.length() - start);
    if (longestPossibleMatch || _namesTrieFullyLoaded) {
        return handler.getMatches();
    }
    return null;
}
// Public entry point; synchronizes before delegating to the internal loader.
@Override
public synchronized void loadAllDisplayNames() {
    internalLoadAllDisplayNames();
}
/**
 * Fills {@code dest} (starting at {@code destOffset}) with one display name
 * per requested type, falling back to the zone's meta zone names for types
 * the zone itself does not define. Entries stay null when no name exists.
 */
@Override
public void getDisplayNames(String tzID, NameType[] types, long date,
        String[] dest, int destOffset) {
    if (tzID == null || tzID.length() == 0) {
        return;
    }
    ZNames tzNames = loadTimeZoneNames(tzID);
    // Meta zone names are resolved lazily, only if some zone name is missing.
    ZNames mzNames = null;
    for (int i = 0; i < types.length; ++i) {
        String name = tzNames.getName(types[i]);
        if (name == null) {
            if (mzNames == null) {
                String mzID = getMetaZoneID(tzID, date);
                mzNames = (mzID == null || mzID.length() == 0)
                        ? ZNames.EMPTY_ZNAMES
                        : loadMetaZoneNames(mzID);
            }
            name = mzNames.getName(types[i]);
        }
        dest[destOffset + i] = name;
    }
}
/** Caller must synchronize. */
private void internalLoadAllDisplayNames() {
    if (!_namesFullyLoaded) {
        // Set the flag before loading so a re-entrant call cannot loop.
        _namesFullyLoaded = true;
        new ZoneStringsLoader().load();
    }
}
/**
 * Caller must synchronize. Pushes every cached time zone and meta zone
 * name set into the parsing trie.
 */
private void addAllNamesIntoTrie() {
    for (Map.Entry<String, ZNames> tzEntry : _tzNamesMap.entrySet()) {
        tzEntry.getValue().addAsTimeZoneIntoTrie(tzEntry.getKey(), _namesTrie);
    }
    for (Map.Entry<String, ZNames> mzEntry : _mzNamesMap.entrySet()) {
        mzEntry.getValue().addAsMetaZoneIntoTrie(mzEntry.getKey(), _namesTrie);
    }
}
/**
 * Loads all meta zone and time zone names for this TimeZoneNames' locale.
 */
private final class ZoneStringsLoader extends UResource.Sink {
    /**
     * Prepare for several hundred time zones and meta zones.
     * _zoneStrings.getSize() is ineffective in a sparsely populated locale like en-GB.
     */
    private static final int INITIAL_NUM_ZONES = 300;
    // One per-zone loader per resource key; DUMMY_LOADER marks keys whose
    // names are already cached and must not be reloaded.
    private HashMap<UResource.Key, ZNamesLoader> keyToLoader =
            new HashMap<UResource.Key, ZNamesLoader>(INITIAL_NUM_ZONES);
    // Scratch buffer reused by mzIDFromKey()/tzIDFromKey().
    private StringBuilder sb = new StringBuilder(32);
    /** Caller must synchronize. */
    void load() {
        // Walk the whole zoneStrings table (with fallback) via put(), then
        // publish the collected names into the instance caches.
        _zoneStrings.getAllItemsWithFallback("", this);
        for (Map.Entry<UResource.Key, ZNamesLoader> entry : keyToLoader.entrySet()) {
            ZNamesLoader loader = entry.getValue();
            if (loader == ZNamesLoader.DUMMY_LOADER) { continue; }
            UResource.Key key = entry.getKey();
            if (isMetaZone(key)) {
                String mzID = mzIDFromKey(key);
                ZNames.createMetaZoneAndPutInCache(_mzNamesMap, loader.getNames(), mzID);
            } else {
                String tzID = tzIDFromKey(key);
                ZNames.createTimeZoneAndPutInCache(_tzNamesMap, loader.getNames(), tzID);
            }
        }
    }
    @Override
    public void put(UResource.Key key, UResource.Value value, boolean noFallback) {
        UResource.Table timeZonesTable = value.getTable();
        for (int j = 0; timeZonesTable.getKeyAndValue(j, key, value); ++j) {
            assert !value.isNoInheritanceMarker();
            if (value.getType() == UResourceBundle.TABLE) {
                consumeNamesTable(key, value, noFallback);
            } else {
                // Ignore fields that aren't tables (e.g., fallbackFormat and regionFormatStandard).
                // All time zone fields are tables.
            }
        }
    }
    private void consumeNamesTable(UResource.Key key, UResource.Value value, boolean noFallback) {
        ZNamesLoader loader = keyToLoader.get(key);
        if (loader == null) {
            if (isMetaZone(key)) {
                String mzID = mzIDFromKey(key);
                if (_mzNamesMap.containsKey(mzID)) {
                    // We have already loaded the names for this meta zone.
                    loader = ZNamesLoader.DUMMY_LOADER;
                } else {
                    loader = new ZNamesLoader();
                }
            } else {
                String tzID = tzIDFromKey(key);
                if (_tzNamesMap.containsKey(tzID)) {
                    // We have already loaded the names for this time zone.
                    loader = ZNamesLoader.DUMMY_LOADER;
                } else {
                    loader = new ZNamesLoader();
                }
            }
            // The sink reuses the Key object across calls, so store a clone.
            UResource.Key newKey = createKey(key);
            keyToLoader.put(newKey, loader);
        }
        if (loader != ZNamesLoader.DUMMY_LOADER) {
            // Let the ZNamesLoader consume the names table.
            loader.put(key, value, noFallback);
        }
    }
    UResource.Key createKey(UResource.Key key) {
        return key.clone();
    }
    boolean isMetaZone(UResource.Key key) {
        return key.startsWith(MZ_PREFIX);
    }
    /**
     * Equivalent to key.substring(MZ_PREFIX.length())
     * except reuses our StringBuilder.
     */
    private String mzIDFromKey(UResource.Key key) {
        sb.setLength(0);
        for (int i = MZ_PREFIX.length(); i < key.length(); ++i) {
            sb.append(key.charAt(i));
        }
        return sb.toString();
    }
    // Resource keys encode '/' in zone IDs as ':'; convert back.
    private String tzIDFromKey(UResource.Key key) {
        sb.setLength(0);
        for (int i = 0; i < key.length(); ++i) {
            char c = key.charAt(i);
            if (c == ':') {
                c = '/';
            }
            sb.append(c);
        }
        return sb.toString();
    }
}
/**
 * Initialize the transient fields, called from the constructor and
 * readObject.
 *
 * @param locale The locale
 */
private void initialize(ULocale locale) {
    ICUResourceBundle bundle = (ICUResourceBundle)ICUResourceBundle.getBundleInstance(
            ICUData.ICU_ZONE_BASE_NAME, locale);
    _zoneStrings = (ICUResourceBundle)bundle.get(ZONE_STRINGS_BUNDLE);
    // TODO: Access is synchronized, can we use a non-concurrent map?
    _tzNamesMap = new ConcurrentHashMap<String, ZNames>();
    _mzNamesMap = new ConcurrentHashMap<String, ZNames>();
    _namesFullyLoaded = false;
    _namesTrie = new TextTrieMap<NameInfo>(true);
    _namesTrieFullyLoaded = false;
    // Preload zone strings for the default time zone
    TimeZone tz = TimeZone.getDefault();
    String tzCanonicalID = ZoneMeta.getCanonicalCLDRID(tz);
    // tzCanonicalID may be null for an unknown/custom default zone.
    if (tzCanonicalID != null) {
        loadStrings(tzCanonicalID);
    }
}
/**
 * Loads all strings used by the specified time zone, including the names
 * of every meta zone the zone has ever belonged to. Called from the
 * initializer to warm the cache for the default zone.
 *
 * @param tzCanonicalID the canonical time zone ID
 */
private synchronized void loadStrings(String tzCanonicalID) {
    if (tzCanonicalID == null || tzCanonicalID.length() == 0) {
        return;
    }
    loadTimeZoneNames(tzCanonicalID);
    for (String mzID : getAvailableMetaZoneIDs(tzCanonicalID)) {
        loadMetaZoneNames(mzID);
    }
}
/*
 * The custom serialization method.
 * This implementation only preserve locale object used for the names.
 */
private void writeObject(ObjectOutputStream out) throws IOException {
    // All name data is rebuilt from resources, so the locale is sufficient.
    ULocale locale = _zoneStrings.getULocale();
    out.writeObject(locale);
}
/*
 * The custom deserialization method.
 * This implementation only read locale object used by the object.
 */
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    ULocale locale = (ULocale)in.readObject();
    // Rebuild all transient state from resource data for this locale.
    initialize(locale);
}
/**
 * Returns the cached name set for the given meta zone ID, loading it from
 * the zoneStrings bundle and caching it on first use.
 *
 * @param mzID the meta zone ID
 * @return An instance of ZNames that includes a set of meta zone display names.
 */
private synchronized ZNames loadMetaZoneNames(String mzID) {
    ZNames cached = _mzNamesMap.get(mzID);
    if (cached != null) {
        return cached;
    }
    ZNamesLoader loader = new ZNamesLoader();
    loader.loadMetaZone(_zoneStrings, mzID);
    return ZNames.createMetaZoneAndPutInCache(_mzNamesMap, loader.getNames(), mzID);
}
/**
 * Returns the cached name set for the given time zone ID, loading it from
 * the zoneStrings bundle and caching it on first use.
 *
 * @param tzID the canonical time zone ID
 * @return An instance of ZNames that includes a set of time zone display names.
 */
private synchronized ZNames loadTimeZoneNames(String tzID) {
    ZNames cached = _tzNamesMap.get(tzID);
    if (cached != null) {
        return cached;
    }
    ZNamesLoader loader = new ZNamesLoader();
    loader.loadTimeZone(_zoneStrings, tzID);
    return ZNames.createTimeZoneAndPutInCache(_tzNamesMap, loader.getNames(), tzID);
}
/**
 * An instance of NameInfo is stored in the zone names trie.
 */
private static class NameInfo {
    // Time zone ID, or null when this entry names a meta zone.
    String tzID;
    // Meta zone ID, or null when tzID is set.
    String mzID;
    // Which display-name type (long/short, generic/standard/daylight, ...)
    // the trie entry represents.
    NameType type;
}
/**
 * NameSearchHandler is used for collecting name matches from the zone
 * names trie, filtered by the requested name types.
 */
private static class NameSearchHandler implements ResultHandler<NameInfo> {
    // Name types to accept; null means accept every type.
    private EnumSet<NameType> _nameTypes;
    private Collection<MatchInfo> _matches;
    private int _maxMatchLen;
    NameSearchHandler(EnumSet<NameType> nameTypes) {
        _nameTypes = nameTypes;
    }
    /* (non-Javadoc)
     * @see android.icu.impl.TextTrieMap.ResultHandler#handlePrefixMatch(int, java.util.Iterator)
     */
    @Override
    public boolean handlePrefixMatch(int matchLength, Iterator<NameInfo> values) {
        while (values.hasNext()) {
            NameInfo ninfo = values.next();
            if (_nameTypes != null && !_nameTypes.contains(ninfo.type)) {
                continue;
            }
            MatchInfo minfo;
            if (ninfo.tzID != null) {
                minfo = new MatchInfo(ninfo.type, ninfo.tzID, null, matchLength);
            } else {
                assert(ninfo.mzID != null);
                minfo = new MatchInfo(ninfo.type, null, ninfo.mzID, matchLength);
            }
            if (_matches == null) {
                // Append-and-iterate only: ArrayList is the idiomatic
                // (and cheaper) choice over LinkedList here.
                _matches = new ArrayList<MatchInfo>();
            }
            _matches.add(minfo);
            if (matchLength > _maxMatchLen) {
                _maxMatchLen = matchLength;
            }
        }
        return true;
    }
    /**
     * Returns the match results
     * @return the match results
     */
    public Collection<MatchInfo> getMatches() {
        if (_matches == null) {
            return Collections.emptyList();
        }
        return _matches;
    }
    /**
     * Returns the maximum match length, or 0 if no match was found
     * @return the maximum match length
     */
    public int getMaxMatchLen() {
        return _maxMatchLen;
    }
    /**
     * Resets the match results
     */
    public void resetResults() {
        _matches = null;
        _maxMatchLen = 0;
    }
}
// Resource sink that collects the up-to-seven display names of one zone or
// meta zone from the zoneStrings bundle. An instance may be reused for
// multiple zones via loadNames().
private static final class ZNamesLoader extends UResource.Sink {
    // Collected names indexed by ZNames.NameTypeIndex ordinal; null until
    // the first table is consumed.
    private String[] names;
    /**
     * Does not load any names, for no-fallback handling.
     */
    private static ZNamesLoader DUMMY_LOADER = new ZNamesLoader();
    void loadMetaZone(ICUResourceBundle zoneStrings, String mzID) {
        String key = MZ_PREFIX + mzID;
        loadNames(zoneStrings, key);
    }
    void loadTimeZone(ICUResourceBundle zoneStrings, String tzID) {
        // Zone IDs use '/', resource keys use ':'.
        String key = tzID.replace('/', ':');
        loadNames(zoneStrings, key);
    }
    void loadNames(ICUResourceBundle zoneStrings, String key) {
        assert zoneStrings != null;
        assert key != null;
        assert key.length() > 0;
        // Reset names so that this instance can be used to load data multiple times.
        names = null;
        try {
            zoneStrings.getAllItemsWithFallback(key, this);
        } catch (MissingResourceException e) {
            // Missing zone entry: getNames() will simply return null.
        }
    }
    // Maps two-character resource keys (lg/ls/ld/sg/ss/sd/ec) to the
    // corresponding storage slot; returns null for unknown keys.
    private static ZNames.NameTypeIndex nameTypeIndexFromKey(UResource.Key key) {
        // Avoid key.toString() object creation.
        if (key.length() != 2) {
            return null;
        }
        char c0 = key.charAt(0);
        char c1 = key.charAt(1);
        if (c0 == 'l') {
            return c1 == 'g' ? ZNames.NameTypeIndex.LONG_GENERIC :
                    c1 == 's' ? ZNames.NameTypeIndex.LONG_STANDARD :
                    c1 == 'd' ? ZNames.NameTypeIndex.LONG_DAYLIGHT : null;
        } else if (c0 == 's') {
            return c1 == 'g' ? ZNames.NameTypeIndex.SHORT_GENERIC :
                    c1 == 's' ? ZNames.NameTypeIndex.SHORT_STANDARD :
                    c1 == 'd' ? ZNames.NameTypeIndex.SHORT_DAYLIGHT : null;
        } else if (c0 == 'e' && c1 == 'c') {
            return ZNames.NameTypeIndex.EXEMPLAR_LOCATION;
        }
        return null;
    }
    // First value wins: fallback bundles are visited after the locale's own
    // data, so an already-set slot is never overwritten.
    private void setNameIfEmpty(UResource.Key key, UResource.Value value) {
        if (names == null) {
            names = new String[ZNames.NUM_NAME_TYPES];
        }
        ZNames.NameTypeIndex index = nameTypeIndexFromKey(key);
        if (index == null) { return; }
        assert index.ordinal() < ZNames.NUM_NAME_TYPES;
        if (names[index.ordinal()] == null) {
            names[index.ordinal()] = value.getString();
        }
    }
    @Override
    public void put(UResource.Key key, UResource.Value value, boolean noFallback) {
        UResource.Table namesTable = value.getTable();
        for (int i = 0; namesTable.getKeyAndValue(i, key, value); ++i) {
            assert value.getType() == UResourceBundle.STRING;
            setNameIfEmpty(key, value); // could be value.isNoInheritanceMarker()
        }
    }
    // Returns the collected names trimmed for permanent storage: null when
    // nothing was loaded, the full array when the last slot is set, or a
    // shorter copy otherwise. No-inheritance markers are cleared to null.
    private String[] getNames() {
        if (Utility.sameObjects(names, null)) {
            return null;
        }
        int length = 0;
        for (int i = 0; i < ZNames.NUM_NAME_TYPES; ++i) {
            String name = names[i];
            if (name != null) {
                if (name.equals(ICUResourceBundle.NO_INHERITANCE_MARKER)) {
                    names[i] = null;
                } else {
                    length = i + 1;
                }
            }
        }
        String[] result;
        if (length == ZNames.NUM_NAME_TYPES) {
            // Return the full array if the last name is set.
            result = names;
        } else if (length == 0) {
            // Return null instead of a zero-length array.
            result = null;
        } else {
            // Return a shorter array for permanent storage.
            // Copy all names into the minimal array.
            result = Arrays.copyOfRange(names, 0, length);
        }
        return result;
    }
}
/**
* This class stores name data for a meta zone or time zone.
*/
private static class ZNames {
/**
 * Private enum corresponding to the public TimeZoneNames::NameType for the order in
 * which fields are stored in a ZNames instance. EXEMPLAR_LOCATION is stored first
 * for efficiency.
 */
private static enum NameTypeIndex {
    EXEMPLAR_LOCATION, LONG_GENERIC, LONG_STANDARD, LONG_DAYLIGHT, SHORT_GENERIC, SHORT_STANDARD, SHORT_DAYLIGHT;
    /* J2ObjC: renamed to avoid collision with the generated array of values. */
    static final NameTypeIndex values_temp[] = values();
};
// Number of storage slots; must equal NameTypeIndex.values().length.
public static final int NUM_NAME_TYPES = 7;
private static int getNameTypeIndex(NameType type) {
switch (type) {
case EXEMPLAR_LOCATION:
return NameTypeIndex.EXEMPLAR_LOCATION.ordinal();
case LONG_GENERIC:
return NameTypeIndex.LONG_GENERIC.ordinal();
case LONG_STANDARD:
return NameTypeIndex.LONG_STANDARD.ordinal();
case LONG_DAYLIGHT:
return NameTypeIndex.LONG_DAYLIGHT.ordinal();
case SHORT_GENERIC:
return NameTypeIndex.SHORT_GENERIC.ordinal();
case SHORT_STANDARD:
return NameTypeIndex.SHORT_STANDARD.ordinal();
case SHORT_DAYLIGHT:
return NameTypeIndex.SHORT_DAYLIGHT.ordinal();
default:
throw new AssertionError("No NameTypeIndex match for " + type);
}
}
private static NameType getNameType(int index) {
switch (NameTypeIndex.values_temp[index]) {
case EXEMPLAR_LOCATION:
return NameType.EXEMPLAR_LOCATION;
case LONG_GENERIC:
return NameType.LONG_GENERIC;
case LONG_STANDARD:
return NameType.LONG_STANDARD;
case LONG_DAYLIGHT:
return NameType.LONG_DAYLIGHT;
case SHORT_GENERIC:
return NameType.SHORT_GENERIC;
case SHORT_STANDARD:
return NameType.SHORT_STANDARD;
case SHORT_DAYLIGHT:
return NameType.SHORT_DAYLIGHT;
default:
throw new AssertionError("No NameType match for " + index);
}
}
static final ZNames EMPTY_ZNAMES = new ZNames(null);
// A meta zone names instance never has an exemplar location string.
private static final int EX_LOC_INDEX = NameTypeIndex.EXEMPLAR_LOCATION.ordinal();
private String[] _names;
private boolean didAddIntoTrie;
protected ZNames(String[] names) {
_names = names;
didAddIntoTrie = names == null;
}
public static ZNames createMetaZoneAndPutInCache(Map<String, ZNames> cache,
String[] names, String mzID) {
String key = mzID.intern();
ZNames value;
if (names == null) {
value = EMPTY_ZNAMES;
} else {
value = new ZNames(names);
}
cache.put(key, value);
return value;
}
public static ZNames createTimeZoneAndPutInCache(Map<String, ZNames> cache,
String[] names, String tzID) {
// For time zones, check that the exemplar city name is populated. If necessary, use
// "getDefaultExemplarLocationName" to extract it from the time zone name.
names = (names == null) ? new String[EX_LOC_INDEX + 1] : names;
if (names[EX_LOC_INDEX] == null) {
names[EX_LOC_INDEX] = getDefaultExemplarLocationName(tzID);
}
String key = tzID.intern();
ZNames value = new ZNames(names);
cache.put(key, value);
return value;
}
public String getName(NameType type) {
int index = getNameTypeIndex(type);
if (_names != null && index < _names.length) {
return _names[index];
} else {
return null;
}
}
public void addAsMetaZoneIntoTrie(String mzID, TextTrieMap<NameInfo> trie) {
addNamesIntoTrie(mzID, null, trie);
}
public void addAsTimeZoneIntoTrie(String tzID, TextTrieMap<NameInfo> trie) {
addNamesIntoTrie(null, tzID, trie);
}
private void addNamesIntoTrie(String mzID, String tzID, TextTrieMap<NameInfo> trie) {
if (_names == null || didAddIntoTrie) {
return;
}
didAddIntoTrie = true;
for (int i = 0; i < _names.length; ++i) {
String name = _names[i];
if (name != null) {
NameInfo info = new NameInfo();
info.mzID = mzID;
info.tzID = tzID;
info.type = getNameType(i);
trie.put(name, info);
}
}
}
}
//
// Canonical time zone ID -> meta zone ID
//
/**
 * Immutable record of a single time zone -&gt; meta zone mapping together with the
 * period [from, to] during which it applies (values produced by parseDate, i.e.
 * UTC milliseconds).
 */
private static class MZMapEntry {
    private final String _mzID;
    private final long _from;
    private final long _to;

    MZMapEntry(String mzID, long from, long to) {
        _mzID = mzID;
        _from = from;
        _to = to;
    }

    /** The meta zone ID this entry maps to. */
    String mzID() {
        return _mzID;
    }

    /** Start of the mapping period. */
    long from() {
        return _from;
    }

    /** End of the mapping period. */
    long to() {
        return _to;
    }
}
/**
 * Soft cache mapping a canonical time zone ID to its list of meta zone mappings,
 * loaded lazily from the "metaZones"/"metazoneInfo" resource bundle.
 */
private static class TZ2MZsCache extends SoftCache<String, List<MZMapEntry>, String> {
    /* (non-Javadoc)
     * @see android.icu.impl.CacheBase#createInstance(java.lang.Object, java.lang.Object)
     */
    @Override
    protected List<MZMapEntry> createInstance(String key, String data) {
        // "data" is the canonical time zone ID; resource keys use ':' where the
        // zone ID uses '/'.
        List<MZMapEntry> mzMaps = null;
        UResourceBundle bundle = UResourceBundle.getBundleInstance(ICUData.ICU_BASE_NAME, "metaZones");
        UResourceBundle metazoneInfoBundle = bundle.get("metazoneInfo");
        String tzkey = data.replace('/', ':');
        try {
            UResourceBundle zoneBundle = metazoneInfoBundle.get(tzkey);
            mzMaps = new ArrayList<MZMapEntry>(zoneBundle.getSize());
            for (int idx = 0; idx < zoneBundle.getSize(); idx++) {
                UResourceBundle mz = zoneBundle.get(idx);
                String mzid = mz.getString(0);
                // When the resource carries no explicit period (size != 3), the
                // mapping covers all representable time.
                String fromStr = "1970-01-01 00:00";
                String toStr = "9999-12-31 23:59";
                if (mz.getSize() == 3) {
                    fromStr = mz.getString(1);
                    toStr = mz.getString(2);
                }
                mzMaps.add(new MZMapEntry(mzid, parseDate(fromStr), parseDate(toStr)));
            }
        } catch (MissingResourceException mre) {
            // This zone has no meta zone mappings at all.
            mzMaps = Collections.emptyList();
        }
        return mzMaps;
    }

    /**
     * Private static method parsing the date text used by meta zone to
     * time zone mapping data in locale resource.
     *
     * @param text the UTC date text in the fixed format "yyyy-MM-dd HH:mm",
     * for example - "1970-01-01 00:00"
     * @return the date in UTC milliseconds
     * @throws IllegalArgumentException if any expected digit position is not a digit
     */
    private static long parseDate(String text) {
        // Refactor: the original repeated the same digit-parsing loop five times;
        // the shared helper preserves the exact exception messages.
        int year = parseField(text, 0, 3, "year");     // "yyyy" (0 - 3)
        int month = parseField(text, 5, 6, "month");   // "MM"   (5 - 6)
        int day = parseField(text, 8, 9, "day");       // "dd"   (8 - 9)
        int hour = parseField(text, 11, 12, "hour");   // "HH"   (11 - 12)
        int min = parseField(text, 14, 15, "minute");  // "mm"   (14 - 15)
        return Grego.fieldsToDay(year, month - 1, day) * Grego.MILLIS_PER_DAY
                + (long)hour * Grego.MILLIS_PER_HOUR + (long)min * Grego.MILLIS_PER_MINUTE;
    }

    /**
     * Parses the decimal number at text[begin..endInclusive].
     *
     * @throws IllegalArgumentException ("Bad " + what) when a non-digit is found
     */
    private static int parseField(String text, int begin, int endInclusive, String what) {
        int result = 0;
        for (int idx = begin; idx <= endInclusive; idx++) {
            int n = text.charAt(idx) - '0';
            if (n < 0 || n > 9) {
                throw new IllegalArgumentException("Bad " + what);
            }
            result = 10 * result + n;
        }
        return result;
    }
}
//
// Meta zone ID -> time zone ID
//
/**
 * Soft cache mapping a meta zone ID to its region -&gt; time zone ID table, loaded
 * lazily from the "metaZones"/"mapTimezones" resource bundle. Both keys and values
 * are interned to cut duplicate strings across meta zones.
 */
private static class MZ2TZsCache extends SoftCache<String, Map<String, String>, String> {
    /* (non-Javadoc)
     * @see android.icu.impl.CacheBase#createInstance(java.lang.Object, java.lang.Object)
     */
    @Override
    protected Map<String, String> createInstance(String key, String data) {
        UResourceBundle bundle = UResourceBundle.getBundleInstance(ICUData.ICU_BASE_NAME, "metaZones");
        UResourceBundle mapTimezones = bundle.get("mapTimezones");
        Map<String, String> map;
        try {
            UResourceBundle regionMap = mapTimezones.get(key);
            Set<String> regions = regionMap.keySet();
            map = new HashMap<String, String>(regions.size());
            for (String region : regions) {
                map.put(region.intern(), regionMap.getString(region).intern());
            }
        } catch (MissingResourceException e) {
            // Unknown meta zone: remember the miss as an immutable empty table.
            map = Collections.emptyMap();
        }
        return map;
    }
}
// Zones that never get a derived exemplar location: the Etc/* and SystemV/*
// pseudo-zones plus the deprecated */Riyadh87-89 solar-time zones.
private static final Pattern LOC_EXCLUSION_PATTERN = Pattern.compile("Etc/.*|SystemV/.*|.*/Riyadh8[7-9]");

/**
 * Default exemplar location name based on time zone ID.
 * For example, "America/New_York" -&gt; "New York"
 * @param tzID the time zone ID
 * @return the exemplar location name or null if location is not available.
 */
public static String getDefaultExemplarLocationName(String tzID) {
    if (tzID == null || tzID.isEmpty() || LOC_EXCLUSION_PATTERN.matcher(tzID).matches()) {
        return null;
    }
    // The location is the segment after the last '/', with '_' shown as a space;
    // a missing, leading, or trailing separator means no location can be derived.
    int sep = tzID.lastIndexOf('/');
    if (sep <= 0 || sep + 1 >= tzID.length()) {
        return null;
    }
    return tzID.substring(sep + 1).replace('_', ' ');
}
}
// ---- concatenated file boundary: googleapis/google-cloud-java :: java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetRuleRegionNetworkFirewallPolicyRequest.java ----
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for RegionNetworkFirewallPolicies.GetRule. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest}
*/
public final class GetRuleRegionNetworkFirewallPolicyRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest)
GetRuleRegionNetworkFirewallPolicyRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetRuleRegionNetworkFirewallPolicyRequest.newBuilder() to construct.
private GetRuleRegionNetworkFirewallPolicyRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetRuleRegionNetworkFirewallPolicyRequest() {
firewallPolicy_ = "";
project_ = "";
region_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GetRuleRegionNetworkFirewallPolicyRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetRuleRegionNetworkFirewallPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetRuleRegionNetworkFirewallPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest.class,
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest.Builder.class);
}
private int bitField0_;
public static final int FIREWALL_POLICY_FIELD_NUMBER = 498173265;
@SuppressWarnings("serial")
private volatile java.lang.Object firewallPolicy_ = "";
/**
*
*
* <pre>
* Name of the firewall policy to which the queried rule belongs.
* </pre>
*
* <code>string firewall_policy = 498173265 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The firewallPolicy.
*/
@java.lang.Override
public java.lang.String getFirewallPolicy() {
java.lang.Object ref = firewallPolicy_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
firewallPolicy_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the firewall policy to which the queried rule belongs.
* </pre>
*
* <code>string firewall_policy = 498173265 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for firewallPolicy.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFirewallPolicyBytes() {
java.lang.Object ref = firewallPolicy_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
firewallPolicy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PRIORITY_FIELD_NUMBER = 445151652;
private int priority_ = 0;
/**
*
*
* <pre>
* The priority of the rule to get from the firewall policy.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @return Whether the priority field is set.
*/
@java.lang.Override
public boolean hasPriority() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The priority of the rule to get from the firewall policy.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @return The priority.
*/
@java.lang.Override
public int getPriority() {
return priority_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REGION_FIELD_NUMBER = 138946292;
@SuppressWarnings("serial")
private volatile java.lang.Object region_ = "";
/**
*
*
* <pre>
* Name of the region scoping this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The region.
*/
@java.lang.Override
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name of the region scoping this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for region.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
    // This message has no required fields, so it is always initialized; the
    // result is cached after the first call.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Fields are emitted in ascending field-number order (canonical proto
    // serialization): region (138946292), project (227560217),
    // priority (445151652), firewall_policy (498173265).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
    }
    // priority is "optional int32": written only when its presence bit is set,
    // even if the value is 0.
    if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(445151652, priority_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(firewallPolicy_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 498173265, firewallPolicy_);
    }
    getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
    // memoizedSize == -1 means "not yet computed"; the size of an immutable
    // message never changes, so it is computed once and cached.
    int size = memoizedSize;
    if (size != -1) return size;

    // Mirrors writeTo(): a field contributes only when it would be written.
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
        size += com.google.protobuf.CodedOutputStream.computeInt32Size(445151652, priority_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(firewallPolicy_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(498173265, firewallPolicy_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest)) {
        return super.equals(obj);
    }
    com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest other =
        (com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest) obj;

    if (!getFirewallPolicy().equals(other.getFirewallPolicy())) return false;
    // For the optional priority field, presence must match before values are compared.
    if (hasPriority() != other.hasPriority()) return false;
    if (hasPriority()) {
        if (getPriority() != other.getPriority()) return false;
    }
    if (!getProject().equals(other.getProject())) return false;
    if (!getRegion().equals(other.getRegion())) return false;
    // Unknown fields participate in equality as well.
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}
@java.lang.Override
public int hashCode() {
    // 0 is the "not computed" sentinel; a hash that genuinely equals 0 is simply
    // recomputed on every call (benign).
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + FIREWALL_POLICY_FIELD_NUMBER;
    hash = (53 * hash) + getFirewallPolicy().hashCode();
    // Optional field contributes only when present, consistent with equals().
    if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority();
    }
    hash = (37 * hash) + PROJECT_FIELD_NUMBER;
    hash = (53 * hash) + getProject().hashCode();
    hash = (37 * hash) + REGION_FIELD_NUMBER;
    hash = (53 * hash) + getRegion().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request message for RegionNetworkFirewallPolicies.GetRule. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest)
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetRuleRegionNetworkFirewallPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetRuleRegionNetworkFirewallPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest.class,
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest.Builder.class);
}
// Construct using
// com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
firewallPolicy_ = "";
priority_ = 0;
project_ = "";
region_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetRuleRegionNetworkFirewallPolicyRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
getDefaultInstanceForType() {
return com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest build() {
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest buildPartial() {
com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest result =
new com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies fields whose builder presence bit is set into the message being built.
// Note the bit layouts differ: builder bit 0x2 (priority) maps to message
// presence bit 0x1 — the message only tracks presence for the optional field.
private void buildPartial0(
    com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
        result.firewallPolicy_ = firewallPolicy_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
        result.priority_ = priority_;
        to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
        result.project_ = project_;
    }
    if (((from_bitField0_ & 0x00000008) != 0)) {
        result.region_ = region_;
    }
    result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest) {
return mergeFrom(
(com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another message of the same type into this builder: set fields in
// "other" overwrite this builder's values (last-wins); unset fields are ignored.
public Builder mergeFrom(
    com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest other) {
    // Fast path: merging the default instance changes nothing.
    if (other
        == com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
            .getDefaultInstance()) return this;
    // Strings use "non-empty means set" semantics (proto3 without presence).
    if (!other.getFirewallPolicy().isEmpty()) {
        firewallPolicy_ = other.firewallPolicy_;
        bitField0_ |= 0x00000001;
        onChanged();
    }
    // priority has explicit presence, so hasPriority() gates the merge.
    if (other.hasPriority()) {
        setPriority(other.getPriority());
    }
    if (!other.getProject().isEmpty()) {
        project_ = other.project_;
        bitField0_ |= 0x00000004;
        onChanged();
    }
    if (!other.getRegion().isEmpty()) {
        region_ = other.region_;
        bitField0_ |= 0x00000008;
        onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
    }
    try {
        boolean done = false;
        while (!done) {
            int tag = input.readTag();
            // Each case value is (field_number << 3) | wire_type. Field numbers
            // here exceed 2^28, so the shifted tag overflows into a negative int
            // for priority and firewall_policy — the constants below are exact.
            switch (tag) {
            case 0:
                // Tag 0 marks end of input.
                done = true;
                break;
            case 1111570338:  // region = 138946292, wire type 2 (length-delimited)
                {
                    region_ = input.readStringRequireUtf8();
                    bitField0_ |= 0x00000008;
                    break;
                } // case 1111570338
            case 1820481738:  // project = 227560217, wire type 2
                {
                    project_ = input.readStringRequireUtf8();
                    bitField0_ |= 0x00000004;
                    break;
                } // case 1820481738
            case -733754080:  // priority = 445151652, wire type 0 (varint)
                {
                    priority_ = input.readInt32();
                    bitField0_ |= 0x00000002;
                    break;
                } // case -733754080
            case -309581174:  // firewall_policy = 498173265, wire type 2
                {
                    firewallPolicy_ = input.readStringRequireUtf8();
                    bitField0_ |= 0x00000001;
                    break;
                } // case -309581174
            default:
                {
                    // Unrecognized fields are preserved as unknown fields.
                    if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                        done = true; // was an endgroup tag
                    }
                    break;
                } // default:
            } // switch (tag)
        } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
    } finally {
        // Notify parents even on failure so partial reads are observed.
        onChanged();
    } // finally
    return this;
}
private int bitField0_;
private java.lang.Object firewallPolicy_ = "";
/**
*
*
* <pre>
* Name of the firewall policy to which the queried rule belongs.
* </pre>
*
* <code>string firewall_policy = 498173265 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The firewallPolicy.
*/
public java.lang.String getFirewallPolicy() {
java.lang.Object ref = firewallPolicy_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
firewallPolicy_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Name of the firewall policy to which the queried rule belongs.
* </pre>
*
* <code>string firewall_policy = 498173265 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for firewallPolicy.
*/
public com.google.protobuf.ByteString getFirewallPolicyBytes() {
java.lang.Object ref = firewallPolicy_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
firewallPolicy_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Name of the firewall policy to which the queried rule belongs.
* </pre>
*
* <code>string firewall_policy = 498173265 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The firewallPolicy to set.
* @return This builder for chaining.
*/
public Builder setFirewallPolicy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
firewallPolicy_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the firewall policy to which the queried rule belongs.
* </pre>
*
* <code>string firewall_policy = 498173265 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearFirewallPolicy() {
firewallPolicy_ = getDefaultInstance().getFirewallPolicy();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Name of the firewall policy to which the queried rule belongs.
* </pre>
*
* <code>string firewall_policy = 498173265 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for firewallPolicy to set.
* @return This builder for chaining.
*/
public Builder setFirewallPolicyBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
firewallPolicy_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int priority_;
/**
*
*
* <pre>
* The priority of the rule to get from the firewall policy.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @return Whether the priority field is set.
*/
@java.lang.Override
public boolean hasPriority() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The priority of the rule to get from the firewall policy.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @return The priority.
*/
@java.lang.Override
public int getPriority() {
return priority_;
}
/**
*
*
* <pre>
* The priority of the rule to get from the firewall policy.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @param value The priority to set.
* @return This builder for chaining.
*/
public Builder setPriority(int value) {
priority_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The priority of the rule to get from the firewall policy.
* </pre>
*
* <code>optional int32 priority = 445151652;</code>
*
* @return This builder for chaining.
*/
public Builder clearPriority() {
bitField0_ = (bitField0_ & ~0x00000002);
priority_ = 0;
onChanged();
return this;
}
// Backing field for "project". Holds either a java.lang.String or a ByteString; the getters
// below lazily convert and cache whichever representation was requested (standard protobuf
// generated-code pattern for string fields).
private java.lang.Object project_ = "";
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The project.
 */
public java.lang.String getProject() {
  java.lang.Object ref = project_;
  if (!(ref instanceof java.lang.String)) {
    // Field currently holds bytes: decode once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    project_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for project.
 */
public com.google.protobuf.ByteString getProjectBytes() {
  java.lang.Object ref = project_;
  if (ref instanceof String) {
    // Field currently holds a String: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    project_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The project to set.
 * @return This builder for chaining.
 */
public Builder setProject(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  project_ = value;
  // Bit 2 of bitField0_ tracks whether project has been set on this builder.
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearProject() {
  // Restore the default-instance value and clear the presence bit.
  project_ = getDefaultInstance().getProject();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for project to set.
 * @return This builder for chaining.
 */
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 string fields must hold well-formed UTF-8; reject bad bytes up front.
  checkByteStringIsUtf8(value);
  project_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Backing field for "region". Holds either a java.lang.String or a ByteString; the getters
// below lazily convert and cache whichever representation was requested.
private java.lang.Object region_ = "";
/**
 *
 *
 * <pre>
 * Name of the region scoping this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The region.
 */
public java.lang.String getRegion() {
  java.lang.Object ref = region_;
  if (!(ref instanceof java.lang.String)) {
    // Field currently holds bytes: decode once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    region_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * Name of the region scoping this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for region.
 */
public com.google.protobuf.ByteString getRegionBytes() {
  java.lang.Object ref = region_;
  if (ref instanceof String) {
    // Field currently holds a String: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    region_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Name of the region scoping this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The region to set.
 * @return This builder for chaining.
 */
public Builder setRegion(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  region_ = value;
  // Bit 3 of bitField0_ tracks whether region has been set on this builder.
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Name of the region scoping this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearRegion() {
  // Restore the default-instance value and clear the presence bit.
  region_ = getDefaultInstance().getRegion();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Name of the region scoping this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for region to set.
 * @return This builder for chaining.
 */
public Builder setRegionBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 string fields must hold well-formed UTF-8; reject bad bytes up front.
  checkByteStringIsUtf8(value);
  region_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
// Unknown-field handling is delegated to the generated superclass unchanged; these overrides
// exist only so the methods return the concrete Builder type for fluent chaining.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest)
// Singleton default instance of this message: all fields at their proto3 defaults.
// Initialized eagerly in the static block below.
private static final com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest();
}

/** Returns the shared immutable default instance of this message. */
public static com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser for this message type. On any parse failure the partially-built message is
// attached to the thrown InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<GetRuleRegionNetworkFirewallPolicyRequest>
    PARSER =
        new com.google.protobuf.AbstractParser<GetRuleRegionNetworkFirewallPolicyRequest>() {
          @java.lang.Override
          public GetRuleRegionNetworkFirewallPolicyRequest parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              // Missing required data: rethrow as an InvalidProtocolBufferException.
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              // Wrap plain I/O errors so the method's declared exception type is uniform.
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };
/** Returns the shared parser for this message type. */
public static com.google.protobuf.Parser<GetRuleRegionNetworkFirewallPolicyRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<GetRuleRegionNetworkFirewallPolicyRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.compute.v1.GetRuleRegionNetworkFirewallPolicyRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/geode | 36,577 | geode-core/src/main/java/org/apache/geode/cache/client/ClientCacheFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.client;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import java.util.Properties;
import org.apache.geode.cache.CacheClosedException;
import org.apache.geode.cache.CacheWriterException;
import org.apache.geode.cache.CacheXmlException;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionExistsException;
import org.apache.geode.cache.TimeoutException;
import org.apache.geode.cache.client.internal.InternalClientCache;
import org.apache.geode.cache.client.proxy.ProxySocketFactories;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.distributed.internal.InternalDistributedSystem;
import org.apache.geode.internal.GemFireVersion;
import org.apache.geode.internal.cache.CacheConfig;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.InternalCacheBuilder;
import org.apache.geode.metrics.internal.InternalDistributedSystemMetricsService;
import org.apache.geode.metrics.internal.MetricsService;
import org.apache.geode.net.SSLParameterExtension;
import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxSerializer;
import org.apache.geode.security.AuthenticationFailedException;
import org.apache.geode.security.AuthenticationRequiredException;
/**
* Factory class used to create the singleton {@link ClientCache client cache} and connect to one or
* more GemFire Cache Servers. If the application wants to connect to GemFire as a peer it should
* use {@link org.apache.geode.cache.CacheFactory} instead.
* <p>
* Once the factory has been configured using its set* methods you produce a {@link ClientCache} by
* calling the {@link #create} method. The
* {@link org.apache.geode.distributed.ConfigurationProperties#CACHE_XML_FILE} property can be used
* to specify a cache.xml file to initialize the cache with. The contents of this file must comply
* with the <code>"doc-files/cache8_0.dtd"</code> file and the top level element must be a
* <code>client-cache</code> element.
* <p>
* Client connections are managed through connection {@link Pool pools}. ClientCacheFactory creates
* a single pool to use by default on the cache it creates. ClientCacheFactory can also be used to
* configure the default connection pool using its <code>setPool*</code> and <code>addPool*</code>
* methods. In most cases, the defaults used by this implementation will suffice. For the default
* pool attributes see {@link PoolFactory}. If no pool is configured and a pool was not declared in
* cache.xml or created using {@link PoolManager} then a default one will be created that connects
* to a server on the default cache server port and local host. If multiple pools are declared in
* cache.xml or created by the PoolFactory then no default pool will exist and
* <code>ClientRegionFactory.setPoolName</code> will need to be called on each region created.
* <p>
* To get the existing unclosed singleton client cache instance call {@link #getAnyInstance}.
* <p>
* The following examples illustrate bootstrapping the client cache using region shortcuts:
* <p>
* Example 1: Connect to a CacheServer on the default host and port and access a region "customers"
*
* <PRE>
* ClientCache c = new ClientCacheFactory().create();
* Region r = c.createClientRegionFactory(PROXY).create("customers");
* // The PROXY shortcut tells GemFire to route all requests to the servers
* // . i.e. there is no local caching
* </PRE>
*
* Example 2: Connect using the GemFire locator and create a local LRU cache
*
* <PRE>
* ClientCache c = new ClientCacheFactory().addPoolLocator(host, port).create();
* Region r = c.createClientRegionFactory(CACHING_PROXY_HEAP_LRU).create("customers");
* // The local LRU "customers" data region will automatically start evicting, by default, at 80%
* // heap utilization threshold
* </PRE>
*
* Example 3: Access the query service
*
* <PRE>
* QueryService qs = new ClientCacheFactory().create().getQueryService();
* </PRE>
*
* Example 4: Construct the client cache region declaratively in cache.xml
*
* <PRE>
* <!DOCTYPE client-cache PUBLIC
* "-//GemStone Systems, Inc.//GemFire Declarative Caching 6.5//EN"
* "http://www.gemstone.com/dtd/cache8_0.dtd">
* <client-cache>
* <pool name="myPool">
* <locator host="hostName" port="10334"/>
* </pool>
* <region name="myRegion" refid="PROXY"/>
* <!-- you can override or add to the PROXY attributes by adding
* a region-attributes sub element here -->
* </client-cache>
* </PRE>
*
* Now, create the cache telling it to read your cache.xml file:
*
* <PRE>
* ClientCache c = new ClientCacheFactory().set("cache-xml-file", "myCache.xml").create();
* Region r = c.getRegion("myRegion");
* </PRE>
* <p>
* For a complete list of all client region shortcuts see {@link ClientRegionShortcut}. Applications
* that need to explicitly control the individual region attributes can do this declaratively in XML
* or using API.
* <p>
* Example 5: Define custom region attributes for persistence in XML and create region using API.
* Define new region attributes with ID "MYAPP_CACHING_PROXY_MEM_LRU" that overrides the
* "CACHING_PROXY" shortcut
*
* <PRE>
* <!DOCTYPE client-cache PUBLIC
* "-//GemStone Systems, Inc.//GemFire Declarative Caching 8.0//EN"
* "http://www.gemstone.com/dtd/cache8_0.dtd">
* <client-cache>
* <!-- now create a named region attributes that uses the CACHING_PROXY shortcut
* and adds a memory LRU limited to 900 megabytes -->
* <region-attributes id="MYAPP_CACHING_PROXY_MEM_LRU" refid="CACHING_PROXY" >
* <lru-memory-size maximum="900"/>
* </region-attributes>
* </client-cache>
* </PRE>
*
* Now, create the data region in the client cache using this new attributes ID.
*
* <PRE>
* ClientCache c = new ClientCacheFactory().set("cache-xml-file", "myCache.xml")
* .addPoolLocator(host, port).create();
* Region r = c.createClientRegionFactory("MYAPP_CACHING_PROXY_MEM_LRU").create("customers");
* </PRE>
*
* @since 6.5
*/
public class ClientCacheFactory {
// Factory for the default connection pool. Created lazily by getPoolFactory(), so it stays
// null unless a setPool*/addPool* method is called before create().
private PoolFactory pf;
// Gemfire properties used to connect the underlying DistributedSystem; populated by the
// constructors and by set(String, String).
private final Properties dsProps;
// Cache-level configuration (e.g. pdx settings) applied when the cache is created.
private final CacheConfig cacheConfig = new CacheConfig();
/**
 * Creates a new client cache factory with no gemfire properties preset. Properties can be
 * supplied afterwards via {@link #set}.
 */
public ClientCacheFactory() {
  this(new Properties());
}
/**
 * Creates a new client cache factory seeded with the given gemfire properties.
 *
 * @param props the initial gemfire properties; {@code null} is treated as an empty property
 *        set. These properties can be overridden later using the {@link #set} method. For a
 *        full list of valid gemfire properties see
 *        {@link org.apache.geode.distributed.ConfigurationProperties}.
 */
public ClientCacheFactory(Properties props) {
  dsProps = (props == null) ? new Properties() : props;
}
/**
 * Records a single gemfire property to be used when the ClientCache is created, overriding any
 * value supplied to the constructor. For a full list of valid gemfire properties see
 * {@link org.apache.geode.distributed.ConfigurationProperties}.
 *
 * @param name the name of the gemfire property
 * @param value the value of the gemfire property
 * @return a reference to this ClientCacheFactory object
 */
public ClientCacheFactory set(String name, String value) {
  this.dsProps.setProperty(name, value);
  return this;
}
/**
 * Create a singleton client cache. If a client cache already exists in this vm that is not
 * compatible with this factory's configuration then create will fail.
 * <p>
 * While creating the cache instance any declarative cache configuration (cache.xml) is processed
 * and used to initialize the created cache.
 * <P>
 * Note that the cache that is produced is a singleton. Before a different instance can be
 * produced the old one must be {@link ClientCache#close closed}.
 *
 * @return the singleton client cache
 * @throws CacheXmlException If a problem occurs while parsing the declarative caching XML file.
 * @throws TimeoutException If a {@link Region#put(Object, Object)} times out while initializing
 *         the cache.
 * @throws CacheWriterException If a <code>CacheWriterException</code> is thrown while
 *         initializing the cache.
 * @throws RegionExistsException If the declarative caching XML file describes a region that
 *         already exists (including the root region).
 * @throws IllegalStateException if a client cache already exists and it is not compatible with
 *         this factory's configuration.
 * @throws IllegalStateException if mcast-port or locator is set on client cache.
 * @throws AuthenticationFailedException if authentication fails.
 * @throws AuthenticationRequiredException if server is in secure mode and client cache is not
 *         configured with security credentials.
 */
public ClientCache create() {
  // All property validation and singleton reuse/creation logic lives in basicCreate().
  return basicCreate();
}
// GemFireCacheImpl.getInstance() is deprecated; the suppression is isolated in this helper so
// callers stay free of deprecation warnings. Returns the current singleton cache, or null.
@SuppressWarnings("deprecation")
private static InternalClientCache getInternalClientCache() {
  return GemFireCacheImpl.getInstance();
}
/**
 * Validates the configured properties, connects the distributed system, and either returns the
 * existing compatible singleton client cache or creates a new one. Synchronized on the class so
 * only one thread can create/validate the singleton at a time.
 */
private ClientCache basicCreate() {
  synchronized (ClientCacheFactory.class) {
    InternalClientCache instance = getInternalClientCache();
    {
      // A client cache must not multicast-discover peers: mcast-port must be 0 or absent.
      String propValue = dsProps.getProperty(MCAST_PORT);
      if (propValue != null) {
        int mcastPort = Integer.parseInt(propValue);
        if (mcastPort != 0) {
          throw new IllegalStateException(
              "On a client cache the mcast-port must be set to 0 or not set. It was set to "
                  + mcastPort);
        }
      }
    }
    {
      // Likewise, peer locators are not allowed; server locators go through addPoolLocator.
      String propValue = dsProps.getProperty(LOCATORS);
      if (propValue != null && !propValue.isEmpty()) {
        throw new IllegalStateException(
            "On a client cache the locators property must be set to an empty string or not set."
                + " It was set to \""
                + propValue + "\".");
      }
    }
    // Force loner (non-peer) mode for the distributed system connection.
    dsProps.setProperty(MCAST_PORT, "0");
    dsProps.setProperty(LOCATORS, "");
    InternalDistributedSystem system = connectInternalDistributedSystem();
    if (instance != null && !instance.isClosed()) {
      // this is ok; just make sure it is a client cache
      if (!instance.isClient()) {
        throw new IllegalStateException(
            "A client cache can not be created because a non-client cache already exists.");
      }
      // check if pool is compatible
      instance.validatePoolFactory(pf);
      // Check if cache configuration matches.
      cacheConfig.validateCacheConfig(instance);
      return instance;
    } else {
      // No usable singleton: build a fresh client cache on the connected system.
      return (InternalClientCache) new InternalCacheBuilder(cacheConfig)
          .setIsClient(true)
          .setPoolFactory(pf)
          .create(system);
    }
  }
}
// Connects the underlying InternalDistributedSystem using this factory's properties and a
// metrics service configured for client mode.
private InternalDistributedSystem connectInternalDistributedSystem() {
  final MetricsService.Builder metricsBuilder =
      new InternalDistributedSystemMetricsService.Builder().setIsClient(true);
  return InternalDistributedSystem.connectInternal(dsProps, null, metricsBuilder);
}
// Returns the default-pool factory, creating it on first use so that no pool factory exists
// unless a setPool*/addPool* method is actually called.
private PoolFactory getPoolFactory() {
  if (pf != null) {
    return pf;
  }
  pf = PoolManager.createFactory();
  return pf;
}
/**
 * Sets the socket connect timeout for this pool: the number of milliseconds to wait when the
 * client connects to servers or locators. A timeout of zero is interpreted as an infinite
 * timeout, blocking until the connection is established or an error occurs.
 *
 * @param socketConnectTimeout timeout in milliseconds when the client connects to the servers
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>socketConnectTimeout</code> is less than or equal to
 *         <code>-1</code>.
 */
public ClientCacheFactory setPoolSocketConnectTimeout(int socketConnectTimeout) {
  final PoolFactory factory = getPoolFactory();
  factory.setSocketConnectTimeout(socketConnectTimeout);
  return this;
}

/**
 * Sets the free connection timeout for this pool. When a max connections limit is configured and
 * every connection is in use, operations block for at most this long waiting for a free
 * connection before throwing an {@link AllConnectionsInUseException}. Without a max connections
 * setting this value has no effect.
 *
 * @param connectionTimeout the connection timeout in milliseconds
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>connectionTimeout</code> is less than or equal to
 *         <code>0</code>.
 * @see #setPoolMaxConnections(int)
 */
public ClientCacheFactory setPoolFreeConnectionTimeout(int connectionTimeout) {
  final PoolFactory factory = getPoolFactory();
  factory.setFreeConnectionTimeout(connectionTimeout);
  return this;
}

/**
 * Sets the server connection timeout for this pool. When a max connections limit is configured
 * and no free connection toward the designated server exists, operations block for at most this
 * long waiting for one before throwing an {@link AllConnectionsInUseException}. Without a max
 * connections setting this value has no effect.
 *
 * @param connectionTimeout the connection timeout in milliseconds
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>connectionTimeout</code> is less than
 *         <code>0</code>.
 * @see #setPoolMaxConnections(int)
 */
public ClientCacheFactory setPoolServerConnectionTimeout(int connectionTimeout) {
  final PoolFactory factory = getPoolFactory();
  factory.setServerConnectionTimeout(connectionTimeout);
  return this;
}

/**
 * Sets the load conditioning interval for this pool: how frequently the pool checks whether a
 * connection to a given server should be moved to a different server to improve load balance.
 * <p>
 * A value of <code>-1</code> disables load conditioning.
 *
 * @param loadConditioningInterval the connection lifetime in milliseconds
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>loadConditioningInterval</code> is less than
 *         <code>-1</code>.
 */
public ClientCacheFactory setPoolLoadConditioningInterval(int loadConditioningInterval) {
  final PoolFactory factory = getPoolFactory();
  factory.setLoadConditioningInterval(loadConditioningInterval);
  return this;
}

/**
 * Sets the socket buffer size for each connection made in this pool. Larger buffers speed up
 * large messages in both directions and help servers push subscription events faster.
 *
 * @param bufferSize the size of the socket buffers used for reading and writing on each
 *        connection in this pool.
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>bufferSize</code> is less than or equal to
 *         <code>0</code>.
 */
public ClientCacheFactory setPoolSocketBufferSize(int bufferSize) {
  final PoolFactory factory = getPoolFactory();
  factory.setSocketBufferSize(bufferSize);
  return this;
}
/**
 * Sets the thread local connections policy for this pool. When <code>true</code>, each thread
 * first checks a thread-local cache for a connection before taking one from the pool, trading
 * more server-side connections for less thread contention. When <code>false</code>, connections
 * return to the pool as soon as each operation completes, allowing them to be shared amongst
 * multiple threads and keeping the connection count down.
 *
 * @param threadLocalConnections if <code>true</code> then enable thread local connections.
 * @return a reference to <code>this</code>
 * @deprecated Since Geode 1.10.0. Thread local connections are ignored. Will be removed in future
 *             major release.
 */
@Deprecated
public ClientCacheFactory setPoolThreadLocalConnections(boolean threadLocalConnections) {
  final PoolFactory factory = getPoolFactory();
  factory.setThreadLocalConnections(threadLocalConnections);
  return this;
}

/**
 * Sets the number of milliseconds to wait for a response from a server before timing out the
 * operation and trying another server (if any are available).
 *
 * @param timeout number of milliseconds to wait for a response from a server
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>timeout</code> is less than <code>0</code>.
 */
public ClientCacheFactory setPoolReadTimeout(int timeout) {
  final PoolFactory factory = getPoolFactory();
  factory.setReadTimeout(timeout);
  return this;
}

/**
 * Sets the minimum number of connections to keep available at all times. The pool creates this
 * many connections up front; with <code>0</code>, no connection is made until an operation
 * actually requires client-to-server communication.
 *
 * @param minConnections the initial number of connections this pool will create.
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>minConnections</code> is less than <code>0</code>.
 */
public ClientCacheFactory setPoolMinConnections(int minConnections) {
  final PoolFactory factory = getPoolFactory();
  factory.setMinConnections(minConnections);
  return this;
}
/**
 * Sets the maximum number of client-to-server connections the pool will create. Once all
 * connections are in use, an operation needing one blocks until a connection becomes available.
 *
 * @param maxConnections the maximum number of connections in the pool; -1 indicates that there
 *        is no maximum number of connections
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>maxConnections</code> is less than
 *         <code>minConnections</code>.
 * @see #setPoolFreeConnectionTimeout(int)
 * @see #setPoolServerConnectionTimeout(int)
 */
public ClientCacheFactory setPoolMaxConnections(int maxConnections) {
  final PoolFactory factory = getPoolFactory();
  factory.setMaxConnections(maxConnections);
  return this;
}

/**
 * Sets how long a connection may sit idle before it is expired. When the pool holds more
 * connections than the minimum from {@link #setPoolMinConnections(int)}, connections idle for
 * longer than this timeout are closed.
 *
 * @param idleTimeout the idle lifetime of a connection, in milliseconds; -1 indicates that
 *        connections should never expire.
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>idleTimeout</code> is less than <code>-1</code>.
 */
public ClientCacheFactory setPoolIdleTimeout(long idleTimeout) {
  final PoolFactory factory = getPoolFactory();
  factory.setIdleTimeout(idleTimeout);
  return this;
}

/**
 * Sets the number of times to retry a request after timeout/exception.
 *
 * @param retryAttempts the number of retries after timeout/exception; -1 indicates that a
 *        request should be tried against every available server before failing
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>retryAttempts</code> is less than <code>-1</code>.
 */
public ClientCacheFactory setPoolRetryAttempts(int retryAttempts) {
  final PoolFactory factory = getPoolFactory();
  factory.setRetryAttempts(retryAttempts);
  return this;
}
/**
 * Sets how often servers are pinged to verify they are still alive. A ping is sent to a server
 * every pingInterval whenever there has been no other communication with it.
 * <p>
 * The server uses these pings to monitor client health, so keep the pingInterval below the
 * maximum time between pings allowed by the cache server.
 *
 * @param pingInterval the amount of time in milliseconds between pings.
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>pingInterval</code> is less than or equal to
 *         <code>0</code>.
 * @see CacheServer#setMaximumTimeBetweenPings(int)
 */
public ClientCacheFactory setPoolPingInterval(long pingInterval) {
  final PoolFactory factory = getPoolFactory();
  factory.setPingInterval(pingInterval);
  return this;
}

/**
 * Sets how often client statistics are sent to the server, which allows <code>gfmon</code> to
 * monitor clients.
 * <p>
 * A value of <code>-1</code> disables the sending of client statistics to the server.
 *
 * @param statisticInterval the amount of time in milliseconds between sends of client statistics
 *        to the server.
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>statisticInterval</code> is less than
 *         <code>-1</code>.
 */
public ClientCacheFactory setPoolStatisticInterval(int statisticInterval) {
  final PoolFactory factory = getPoolFactory();
  factory.setStatisticInterval(statisticInterval);
  return this;
}

/**
 * Configures the server group that every server this pool connects to must belong to.
 *
 * @param group the server group this pool will connect to. If <code>null</code> or
 *        <code>""</code> then all servers will be connected to.
 * @return a reference to <code>this</code>
 */
public ClientCacheFactory setPoolServerGroup(String group) {
  final PoolFactory factory = getPoolFactory();
  factory.setServerGroup(group);
  return this;
}
/**
 * Adds a locator, by host and port, to this factory. The locator must be a server locator and is
 * used to discover other running cache servers and locators. If the host is unknown at the time
 * of this call the locator is still added; an exception is thrown later if the host is still
 * unknown when the pool performs an operation.
 *
 * @param host the host name or ip address that the locator is listening on.
 * @param port the port that the locator is listening on
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if port is outside the valid range of [0..65535] inclusive.
 * @throws IllegalStateException if a server has already been {@link #addPoolServer added} to this
 *         factory.
 */
public ClientCacheFactory addPoolLocator(String host, int port) {
  final PoolFactory factory = getPoolFactory();
  factory.addLocator(host, port);
  return this;
}

/**
 * Adds a server, by host and port, to this factory. The server must be a cache server and the
 * client connects to it directly without consulting a server locator. If the host is unknown at
 * the time of this call the server is still added; an exception is thrown later if the host is
 * still unknown when the pool performs an operation.
 *
 * @param host the host name or ip address that the server is listening on.
 * @param port the port that the server is listening on
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if port is outside the valid range of [0..65535] inclusive.
 * @throws IllegalStateException if a locator has already been {@link #addPoolLocator added} to
 *         this factory.
 */
public ClientCacheFactory addPoolServer(String host, int port) {
  final PoolFactory factory = getPoolFactory();
  factory.addServer(host, port);
  return this;
}
/**
 * Enables or disables server-to-client subscriptions on the created pool. When set to
 * <code>false</code>, every other <code>Subscription*</code> attribute is ignored at create
 * time.
 *
 * @param enabled whether the created pool will have server-to-client subscriptions enabled
 * @return a reference to <code>this</code>
 */
public ClientCacheFactory setPoolSubscriptionEnabled(boolean enabled) {
  final PoolFactory factory = getPoolFactory();
  factory.setSubscriptionEnabled(enabled);
  return this;
}

/**
 * Sets the redundancy level for this pool's server-to-client subscriptions. With <code>0</code>
 * no redundant copies are kept on the servers; otherwise the requested number of copies is
 * maintained on a best-effort basis, at most one copy per server.
 *
 * @param redundancy the number of redundant servers for this client's subscriptions.
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>redundancy</code> is less than <code>-1</code>.
 */
public ClientCacheFactory setPoolSubscriptionRedundancy(int redundancy) {
  final PoolFactory factory = getPoolFactory();
  factory.setSubscriptionRedundancy(redundancy);
  return this;
}

/**
 * A server has an inactivity monitor that ensures a message is sent to a client at least once a
 * minute (60,000 milliseconds). Setting a subscription timeout multiplier in the client enables
 * timing out of the subscription feed with failover to another server.
 * <p>
 * The client times out its subscription connection after a number of seconds equal to this
 * multiplier times the server's subscription-timeout.
 * <p>
 * Use 2 or more to ensure the client receives server pings before the timeout fires.
 * <p>
 * A value of zero (the default) disables timeouts.
 * <p>
 * The resulting timeout is multiplied by 1.25 to avoid race conditions with the server sending
 * its "ping" message.
 *
 * @param multiplier the subscription timeout multiplier to set
 * @return a reference to <code>this</code>
 */
public ClientCacheFactory setPoolSubscriptionTimeoutMultiplier(int multiplier) {
  final PoolFactory factory = getPoolFactory();
  factory.setSubscriptionTimeoutMultiplier(multiplier);
  return this;
}

/**
 * Sets the messageTrackingTimeout attribute: the time-to-live period, in milliseconds, for
 * subscription events the client has received from the server. It is used to minimize duplicate
 * events; entries unmodified for this long are expired from the tracking list.
 *
 * @param messageTrackingTimeout number of milliseconds to set the timeout to.
 * @return a reference to <code>this</code>
 * @throws IllegalArgumentException if <code>messageTrackingTimeout</code> is less than or equal
 *         to <code>0</code>.
 */
public ClientCacheFactory setPoolSubscriptionMessageTrackingTimeout(int messageTrackingTimeout) {
  final PoolFactory factory = getPoolFactory();
  factory.setSubscriptionMessageTrackingTimeout(messageTrackingTimeout);
  return this;
}
  /**
   * Set the socket factory used by this pool to create connections to both locators (if
   * configured using {@link #addPoolLocator(String, int)}) and servers.
   *
   * Sockets returned by this factory will have the rest of the configuration options
   * specified on this pool and on the {@link ClientCache} applied to them. In particular,
   * sockets returned by this factory will be wrapped with SSLSockets if ssl is enabled
   * for this client cache.
   *
   * This factory can be used for configuring a proxy, or overriding various socket settings.
   * For modifying SSL settings, see {@link SSLParameterExtension}
   *
   * See {@link ProxySocketFactories}
   *
   * @param socketFactory The {@link SocketFactory} to use
   * @return a reference to <code>this</code>
   * @see PoolFactory#setSocketFactory(SocketFactory)
   * @since Geode 1.13
   */
  public ClientCacheFactory setPoolSocketFactory(SocketFactory socketFactory) {
    getPoolFactory().setSocketFactory(socketFactory);
    return this;
  }
/**
* Sets the interval in milliseconds to wait before sending acknowledgements to the cache server
* for events received from the server subscriptions.
*
* @param ackInterval number of milliseconds to wait before sending event acknowledgements.
* @return a reference to <code>this</code>
* @throws IllegalArgumentException if <code>ackInterval</code> is less than or equal to
* <code>0</code>.
*/
public ClientCacheFactory setPoolSubscriptionAckInterval(int ackInterval) {
getPoolFactory().setSubscriptionAckInterval(ackInterval);
return this;
}
  /**
   * By default setPRSingleHopEnabled is <code>true</code> in which case the client is aware of the
   * location of partitions on servers hosting {@link Region regions} with
   * {@link org.apache.geode.cache.DataPolicy#PARTITION}. Using this information, the client routes
   * the client cache operations directly to the server which is hosting the required partition for
   * the cache operation using a single network hop. This mode works best when
   * {@link #setPoolMaxConnections(int)} is set to <code>-1</code> which is the default. This mode
   * causes the client to have more connections to the servers.
   * <p>
   * If setPRSingleHopEnabled is <code>false</code> the client may need to do an extra network hop
   * on servers to go to the required partition for that cache operation. The client will use fewer
   * network connections to the servers.
   * <p>
   * Caution: for {@link org.apache.geode.cache.DataPolicy#PARTITION partition} regions with
   * {@link org.apache.geode.cache.PartitionAttributesFactory#setLocalMaxMemory(int)
   * local-max-memory} equal to zero, no cache operations mentioned above will be routed to those
   * servers as they do not host any partitions.
   *
   * @param enabled whether Partition Region single hop is enabled
   * @return a reference to <code>this</code>
   */
  public ClientCacheFactory setPoolPRSingleHopEnabled(boolean enabled) {
    getPoolFactory().setPRSingleHopEnabled(enabled);
    return this;
  }
/**
* If set to <code>true</code> then the created pool can be used by multiple users. <br>
* <br>
* Note: If set to true, all the client side regions must be {@link ClientRegionShortcut#PROXY
* proxies}. No client side storage is allowed.
*
* @param enabled whether the created pool can be used by multiple users
* @return a reference to <code>this</code>
*/
public ClientCacheFactory setPoolMultiuserAuthentication(boolean enabled) {
getPoolFactory().setMultiuserAuthentication(enabled);
return this;
}
/**
* Returns the version of the cache implementation.
*
* @return the version of the cache implementation as a <code>String</code>
*/
public static String getVersion() {
return GemFireVersion.getGemFireVersion();
}
/**
* Gets an arbitrary open instance of {@link ClientCache} produced by an earlier call to
* {@link #create}.
*
* @return an arbitrary open instance of {@link ClientCache} produced by an earlier call to
* {@link #create}
*
* @throws CacheClosedException if a cache has not been created or the only created one is
* {@link ClientCache#isClosed closed}
* @throws IllegalStateException if the cache was created by CacheFactory instead of
* ClientCacheFactory
*/
public static synchronized ClientCache getAnyInstance() {
InternalClientCache instance = getInternalClientCache();
if (instance == null) {
throw new CacheClosedException(
"A cache has not yet been created.");
} else {
if (!instance.isClient()) {
throw new IllegalStateException(
"The singleton cache was created by CacheFactory not ClientCacheFactory.");
}
instance.getCancelCriterion().checkCancelInProgress(null);
return instance;
}
}
/**
* Sets the object preference to PdxInstance type. When a cached object that was serialized as a
* PDX is read from the cache a {@link PdxInstance} will be returned instead of the actual domain
* class. The PdxInstance is an interface that provides run time access to the fields of a PDX
* without deserializing the entire PDX. The PdxInstance implementation is a light weight wrapper
* that simply refers to the raw bytes of the PDX that are kept in the cache. Using this method
* applications can choose to access PdxInstance instead of Java object.
* <p>
* Note that a PdxInstance is only returned if a serialized PDX is found in the cache. If the
* cache contains a deserialized PDX, then a domain class instance is returned instead of a
* PdxInstance.
*
* @param pdxReadSerialized true to prefer PdxInstance
* @return this ClientCacheFactory
* @see org.apache.geode.pdx.PdxInstance
* @since GemFire 6.6
*/
public ClientCacheFactory setPdxReadSerialized(boolean pdxReadSerialized) {
cacheConfig.setPdxReadSerialized(pdxReadSerialized);
return this;
}
/**
* Set the PDX serializer for the cache. If this serializer is set, it will be consulted to see if
* it can serialize any domain classes which are added to the cache in portable data exchange
* format.
*
* @param serializer the serializer to use
* @return this ClientCacheFactory
* @see PdxSerializer
* @since GemFire 6.6
*/
public ClientCacheFactory setPdxSerializer(PdxSerializer serializer) {
cacheConfig.setPdxSerializer(serializer);
return this;
}
/**
* Set the disk store that is used for PDX meta data. When serializing objects in the PDX format,
* the type definitions are persisted to disk. This setting controls which disk store is used for
* that persistence.
* <p>
* If not set, the metadata will go in the default disk store.
*
* @param diskStoreName the name of the disk store to use for the PDX metadata.
* @return this ClientCacheFactory
* @since GemFire 6.6
* @deprecated Pdx Persistence is not supported on client side. Even when set, it's internally
* ignored.
*/
@Deprecated
public ClientCacheFactory setPdxDiskStore(String diskStoreName) {
cacheConfig.setPdxDiskStore(diskStoreName);
return this;
}
/**
* Control whether the type metadata for PDX objects is persisted to disk. The default for this
* setting is false. If you are using persistent regions with PDX then you must set this to true.
* If you are using a WAN gateway with PDX then you should set this to true.
*
* @param isPersistent true if the metadata should be persistent
* @return this ClientCacheFactory
* @since GemFire 6.6
* @deprecated Pdx Persistence is not supported on client side. Even when set, it's internally
* ignored.
*/
@Deprecated
public ClientCacheFactory setPdxPersistent(boolean isPersistent) {
cacheConfig.setPdxPersistent(isPersistent);
return this;
}
  /**
   * Control whether pdx ignores fields that were unread during deserialization. The default is to
   * preserve unread fields by including their data during serialization. But if you configure the
   * cache to ignore unread fields then their data will be lost during serialization.
   * <P>
   * You should only set this attribute to <code>true</code> if you know this member will only be
   * reading cache data. In this use case you do not need to pay the cost of preserving the unread
   * fields since you will never be reserializing pdx data.
   *
   * @param ignore <code>true</code> if fields not read during pdx deserialization should be
   *        ignored; <code>false</code>, the default, if they should be preserved.
   * @return this ClientCacheFactory
   * @since GemFire 6.6
   */
  public ClientCacheFactory setPdxIgnoreUnreadFields(boolean ignore) {
    cacheConfig.setPdxIgnoreUnreadFields(ignore);
    return this;
  }
}
|
googleapis/google-cloud-java | 35,329 | java-iot/proto-google-cloud-iot-v1/src/main/java/com/google/cloud/iot/v1/DeviceManagerProto.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/iot/v1/device_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.iot.v1;
public final class DeviceManagerProto {
private DeviceManagerProto() {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_CreateDeviceRegistryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_CreateDeviceRegistryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_GetDeviceRegistryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_GetDeviceRegistryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_DeleteDeviceRegistryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_DeleteDeviceRegistryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_UpdateDeviceRegistryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_UpdateDeviceRegistryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDeviceRegistriesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDeviceRegistriesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDeviceRegistriesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDeviceRegistriesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_CreateDeviceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_CreateDeviceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_GetDeviceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_GetDeviceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_UpdateDeviceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_UpdateDeviceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_DeleteDeviceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_DeleteDeviceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDevicesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDevicesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_GatewayListOptions_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_GatewayListOptions_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDevicesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDevicesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ModifyCloudToDeviceConfigRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ModifyCloudToDeviceConfigRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDeviceStatesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDeviceStatesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_ListDeviceStatesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_ListDeviceStatesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_SendCommandToDeviceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_SendCommandToDeviceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_SendCommandToDeviceResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_SendCommandToDeviceResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_BindDeviceToGatewayRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_BindDeviceToGatewayRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_BindDeviceToGatewayResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_BindDeviceToGatewayResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayResponse_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
java.lang.String[] descriptorData = {
"\n(google/cloud/iot/v1/device_manager.pro"
+ "to\022\023google.cloud.iot.v1\032\034google/api/anno"
+ "tations.proto\032\027google/api/client.proto\032\037"
+ "google/api/field_behavior.proto\032\031google/"
+ "api/resource.proto\032#google/cloud/iot/v1/"
+ "resources.proto\032\036google/iam/v1/iam_polic"
+ "y.proto\032\032google/iam/v1/policy.proto\032\033goo"
+ "gle/protobuf/empty.proto\032 google/protobu"
+ "f/field_mask.proto\"\233\001\n\033CreateDeviceRegis"
+ "tryRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!loca"
+ "tions.googleapis.com/Location\022A\n\017device_"
+ "registry\030\002 \001(\0132#.google.cloud.iot.v1.Dev"
+ "iceRegistryB\003\340A\002\"R\n\030GetDeviceRegistryReq"
+ "uest\0226\n\004name\030\001 \001(\tB(\340A\002\372A\"\n cloudiot.goo"
+ "gleapis.com/Registry\"U\n\033DeleteDeviceRegi"
+ "stryRequest\0226\n\004name\030\001 \001(\tB(\340A\002\372A\"\n cloud"
+ "iot.googleapis.com/Registry\"\226\001\n\033UpdateDe"
+ "viceRegistryRequest\022A\n\017device_registry\030\001"
+ " \001(\0132#.google.cloud.iot.v1.DeviceRegistr"
+ "yB\003\340A\002\0224\n\013update_mask\030\002 \001(\0132\032.google.pro"
+ "tobuf.FieldMaskB\003\340A\002\"\177\n\033ListDeviceRegist"
+ "riesRequest\0229\n\006parent\030\001 \001(\tB)\340A\002\372A#\n!loc"
+ "ations.googleapis.com/Location\022\021\n\tpage_s"
+ "ize\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"w\n\034ListDev"
+ "iceRegistriesResponse\022>\n\021device_registri"
+ "es\030\001 \003(\0132#.google.cloud.iot.v1.DeviceReg"
+ "istry\022\027\n\017next_page_token\030\002 \001(\t\"\201\001\n\023Creat"
+ "eDeviceRequest\0228\n\006parent\030\001 \001(\tB(\340A\002\372A\"\n "
+ "cloudiot.googleapis.com/Registry\0220\n\006devi"
+ "ce\030\002 \001(\0132\033.google.cloud.iot.v1.DeviceB\003\340"
+ "A\002\"x\n\020GetDeviceRequest\0224\n\004name\030\001 \001(\tB&\340A"
+ "\002\372A \n\036cloudiot.googleapis.com/Device\022.\n\n"
+ "field_mask\030\002 \001(\0132\032.google.protobuf.Field"
+ "Mask\"}\n\023UpdateDeviceRequest\0220\n\006device\030\002 "
+ "\001(\0132\033.google.cloud.iot.v1.DeviceB\003\340A\002\0224\n"
+ "\013update_mask\030\003 \001(\0132\032.google.protobuf.Fie"
+ "ldMaskB\003\340A\002\"K\n\023DeleteDeviceRequest\0224\n\004na"
+ "me\030\001 \001(\tB&\340A\002\372A \n\036cloudiot.googleapis.co"
+ "m/Device\"\230\002\n\022ListDevicesRequest\0228\n\006paren"
+ "t\030\001 \001(\tB(\340A\002\372A\"\n cloudiot.googleapis.com"
+ "/Registry\022\026\n\016device_num_ids\030\002 \003(\004\022\022\n\ndev"
+ "ice_ids\030\003 \003(\t\022.\n\nfield_mask\030\004 \001(\0132\032.goog"
+ "le.protobuf.FieldMask\022E\n\024gateway_list_op"
+ "tions\030\006 \001(\0132\'.google.cloud.iot.v1.Gatewa"
+ "yListOptions\022\021\n\tpage_size\030d \001(\005\022\022\n\npage_"
+ "token\030e \001(\t\"\235\001\n\022GatewayListOptions\0228\n\014ga"
+ "teway_type\030\001 \001(\0162 .google.cloud.iot.v1.G"
+ "atewayTypeH\000\022!\n\027associations_gateway_id\030"
+ "\002 \001(\tH\000\022 \n\026associations_device_id\030\003 \001(\tH"
+ "\000B\010\n\006filter\"\\\n\023ListDevicesResponse\022,\n\007de"
+ "vices\030\001 \003(\0132\033.google.cloud.iot.v1.Device"
+ "\022\027\n\017next_page_token\030\002 \001(\t\"\215\001\n ModifyClou"
+ "dToDeviceConfigRequest\0224\n\004name\030\001 \001(\tB&\340A"
+ "\002\372A \n\036cloudiot.googleapis.com/Device\022\031\n\021"
+ "version_to_update\030\002 \001(\003\022\030\n\013binary_data\030\003"
+ " \001(\014B\003\340A\002\"m\n\037ListDeviceConfigVersionsReq"
+ "uest\0224\n\004name\030\001 \001(\tB&\340A\002\372A \n\036cloudiot.goo"
+ "gleapis.com/Device\022\024\n\014num_versions\030\002 \001(\005"
+ "\"]\n ListDeviceConfigVersionsResponse\0229\n\016"
+ "device_configs\030\001 \003(\0132!.google.cloud.iot."
+ "v1.DeviceConfig\"c\n\027ListDeviceStatesReque"
+ "st\0224\n\004name\030\001 \001(\tB&\340A\002\372A \n\036cloudiot.googl"
+ "eapis.com/Device\022\022\n\nnum_states\030\002 \001(\005\"S\n\030"
+ "ListDeviceStatesResponse\0227\n\rdevice_state"
+ "s\030\001 \003(\0132 .google.cloud.iot.v1.DeviceStat"
+ "e\"\177\n\032SendCommandToDeviceRequest\0224\n\004name\030"
+ "\001 \001(\tB&\340A\002\372A \n\036cloudiot.googleapis.com/D"
+ "evice\022\030\n\013binary_data\030\002 \001(\014B\003\340A\002\022\021\n\tsubfo"
+ "lder\030\003 \001(\t\"\035\n\033SendCommandToDeviceRespons"
+ "e\"\207\001\n\032BindDeviceToGatewayRequest\0228\n\006pare"
+ "nt\030\001 \001(\tB(\340A\002\372A\"\n cloudiot.googleapis.co"
+ "m/Registry\022\027\n\ngateway_id\030\002 \001(\tB\003\340A\002\022\026\n\td"
+ "evice_id\030\003 \001(\tB\003\340A\002\"\035\n\033BindDeviceToGatew"
+ "ayResponse\"\213\001\n\036UnbindDeviceFromGatewayRe"
+ "quest\0228\n\006parent\030\001 \001(\tB(\340A\002\372A\"\n cloudiot."
+ "googleapis.com/Registry\022\027\n\ngateway_id\030\002 "
+ "\001(\tB\003\340A\002\022\026\n\tdevice_id\030\003 \001(\tB\003\340A\002\"!\n\037Unbi"
+ "ndDeviceFromGatewayResponse2\246&\n\rDeviceMa"
+ "nager\022\317\001\n\024CreateDeviceRegistry\0220.google."
+ "cloud.iot.v1.CreateDeviceRegistryRequest"
+ "\032#.google.cloud.iot.v1.DeviceRegistry\"`\332"
+ "A\026parent,device_registry\202\323\344\223\002A\"./v1/{par"
+ "ent=projects/*/locations/*}/registries:\017"
+ "device_registry\022\246\001\n\021GetDeviceRegistry\022-."
+ "google.cloud.iot.v1.GetDeviceRegistryReq"
+ "uest\032#.google.cloud.iot.v1.DeviceRegistr"
+ "y\"=\332A\004name\202\323\344\223\0020\022./v1/{name=projects/*/l"
+ "ocations/*/registries/*}\022\344\001\n\024UpdateDevic"
+ "eRegistry\0220.google.cloud.iot.v1.UpdateDe"
+ "viceRegistryRequest\032#.google.cloud.iot.v"
+ "1.DeviceRegistry\"u\332A\033device_registry,upd"
+ "ate_mask\202\323\344\223\002Q2>/v1/{device_registry.nam"
+ "e=projects/*/locations/*/registries/*}:\017"
+ "device_registry\022\237\001\n\024DeleteDeviceRegistry"
+ "\0220.google.cloud.iot.v1.DeleteDeviceRegis"
+ "tryRequest\032\026.google.protobuf.Empty\"=\332A\004n"
+ "ame\202\323\344\223\0020*./v1/{name=projects/*/location"
+ "s/*/registries/*}\022\274\001\n\024ListDeviceRegistri"
+ "es\0220.google.cloud.iot.v1.ListDeviceRegis"
+ "triesRequest\0321.google.cloud.iot.v1.ListD"
+ "eviceRegistriesResponse\"?\332A\006parent\202\323\344\223\0020"
+ "\022./v1/{parent=projects/*/locations/*}/re"
+ "gistries\022\257\001\n\014CreateDevice\022(.google.cloud"
+ ".iot.v1.CreateDeviceRequest\032\033.google.clo"
+ "ud.iot.v1.Device\"X\332A\rparent,device\202\323\344\223\002B"
+ "\"8/v1/{parent=projects/*/locations/*/reg"
+ "istries/*}/devices:\006device\022\336\001\n\tGetDevice"
+ "\022%.google.cloud.iot.v1.GetDeviceRequest\032"
+ "\033.google.cloud.iot.v1.Device\"\214\001\332A\004name\202\323"
+ "\344\223\002\177\0228/v1/{name=projects/*/locations/*/r"
+ "egistries/*/devices/*}ZC\022A/v1/{name=proj"
+ "ects/*/locations/*/registries/*/groups/*"
+ "/devices/*}\022\221\002\n\014UpdateDevice\022(.google.cl"
+ "oud.iot.v1.UpdateDeviceRequest\032\033.google."
+ "cloud.iot.v1.Device\"\271\001\332A\022device,update_m"
+ "ask\202\323\344\223\002\235\0012?/v1/{device.name=projects/*/"
+ "locations/*/registries/*/devices/*}:\006dev"
+ "iceZR2H/v1/{device.name=projects/*/locat"
+ "ions/*/registries/*/groups/*/devices/*}:"
+ "\006device\022\231\001\n\014DeleteDevice\022(.google.cloud."
+ "iot.v1.DeleteDeviceRequest\032\026.google.prot"
+ "obuf.Empty\"G\332A\004name\202\323\344\223\002:*8/v1/{name=pro"
+ "jects/*/locations/*/registries/*/devices"
+ "/*}\022\361\001\n\013ListDevices\022\'.google.cloud.iot.v"
+ "1.ListDevicesRequest\032(.google.cloud.iot."
+ "v1.ListDevicesResponse\"\216\001\332A\006parent\202\323\344\223\002\177"
+ "\0228/v1/{parent=projects/*/locations/*/reg"
+ "istries/*}/devicesZC\022A/v1/{parent=projec"
+ "ts/*/locations/*/registries/*/groups/*}/"
+ "devices\022\313\002\n\031ModifyCloudToDeviceConfig\0225."
+ "google.cloud.iot.v1.ModifyCloudToDeviceC"
+ "onfigRequest\032!.google.cloud.iot.v1.Devic"
+ "eConfig\"\323\001\332A\020name,binary_data\202\323\344\223\002\271\001\"R/v"
+ "1/{name=projects/*/locations/*/registrie"
+ "s/*/devices/*}:modifyCloudToDeviceConfig"
+ ":\001*Z`\"[/v1/{name=projects/*/locations/*/"
+ "registries/*/groups/*/devices/*}:modifyC"
+ "loudToDeviceConfig:\001*\022\265\002\n\030ListDeviceConf"
+ "igVersions\0224.google.cloud.iot.v1.ListDev"
+ "iceConfigVersionsRequest\0325.google.cloud."
+ "iot.v1.ListDeviceConfigVersionsResponse\""
+ "\253\001\332A\004name\202\323\344\223\002\235\001\022G/v1/{name=projects/*/l"
+ "ocations/*/registries/*/devices/*}/confi"
+ "gVersionsZR\022P/v1/{name=projects/*/locati"
+ "ons/*/registries/*/groups/*/devices/*}/c"
+ "onfigVersions\022\215\002\n\020ListDeviceStates\022,.goo"
+ "gle.cloud.iot.v1.ListDeviceStatesRequest"
+ "\032-.google.cloud.iot.v1.ListDeviceStatesR"
+ "esponse\"\233\001\332A\004name\202\323\344\223\002\215\001\022?/v1/{name=proj"
+ "ects/*/locations/*/registries/*/devices/"
+ "*}/statesZJ\022H/v1/{name=projects/*/locati"
+ "ons/*/registries/*/groups/*/devices/*}/s"
+ "tates\022\370\001\n\014SetIamPolicy\022\".google.iam.v1.S"
+ "etIamPolicyRequest\032\025.google.iam.v1.Polic"
+ "y\"\254\001\332A\017resource,policy\202\323\344\223\002\223\001\"?/v1/{reso"
+ "urce=projects/*/locations/*/registries/*"
+ "}:setIamPolicy:\001*ZM\"H/v1/{resource=proje"
+ "cts/*/locations/*/registries/*/groups/*}"
+ ":setIamPolicy:\001*\022\361\001\n\014GetIamPolicy\022\".goog"
+ "le.iam.v1.GetIamPolicyRequest\032\025.google.i"
+ "am.v1.Policy\"\245\001\332A\010resource\202\323\344\223\002\223\001\"?/v1/{"
+ "resource=projects/*/locations/*/registri"
+ "es/*}:getIamPolicy:\001*ZM\"H/v1/{resource=p"
+ "rojects/*/locations/*/registries/*/group"
+ "s/*}:getIamPolicy:\001*\022\251\002\n\022TestIamPermissi"
+ "ons\022(.google.iam.v1.TestIamPermissionsRe"
+ "quest\032).google.iam.v1.TestIamPermissions"
+ "Response\"\275\001\332A\024resource,permissions\202\323\344\223\002\237"
+ "\001\"E/v1/{resource=projects/*/locations/*/"
+ "registries/*}:testIamPermissions:\001*ZS\"N/"
+ "v1/{resource=projects/*/locations/*/regi"
+ "stries/*/groups/*}:testIamPermissions:\001*"
+ "\022\337\002\n\023SendCommandToDevice\022/.google.cloud."
+ "iot.v1.SendCommandToDeviceRequest\0320.goog"
+ "le.cloud.iot.v1.SendCommandToDeviceRespo"
+ "nse\"\344\001\332A\020name,binary_data\332A\032name,binary_"
+ "data,subfolder\202\323\344\223\002\255\001\"L/v1/{name=project"
+ "s/*/locations/*/registries/*/devices/*}:"
+ "sendCommandToDevice:\001*ZZ\"U/v1/{name=proj"
+ "ects/*/locations/*/registries/*/groups/*"
+ "/devices/*}:sendCommandToDevice:\001*\022\275\002\n\023B"
+ "indDeviceToGateway\022/.google.cloud.iot.v1"
+ ".BindDeviceToGatewayRequest\0320.google.clo"
+ "ud.iot.v1.BindDeviceToGatewayResponse\"\302\001"
+ "\332A\033parent,gateway_id,device_id\202\323\344\223\002\235\001\"D/"
+ "v1/{parent=projects/*/locations/*/regist"
+ "ries/*}:bindDeviceToGateway:\001*ZR\"M/v1/{p"
+ "arent=projects/*/locations/*/registries/"
+ "*/groups/*}:bindDeviceToGateway:\001*\022\321\002\n\027U"
+ "nbindDeviceFromGateway\0223.google.cloud.io"
+ "t.v1.UnbindDeviceFromGatewayRequest\0324.go"
+ "ogle.cloud.iot.v1.UnbindDeviceFromGatewa"
+ "yResponse\"\312\001\332A\033parent,gateway_id,device_"
+ "id\202\323\344\223\002\245\001\"H/v1/{parent=projects/*/locati"
+ "ons/*/registries/*}:unbindDeviceFromGate"
+ "way:\001*ZV\"Q/v1/{parent=projects/*/locatio"
+ "ns/*/registries/*/groups/*}:unbindDevice"
+ "FromGateway:\001*\032t\312A\027cloudiot.googleapis.c"
+ "om\322AWhttps://www.googleapis.com/auth/clo"
+ "ud-platform,https://www.googleapis.com/a"
+ "uth/cloudiotB]\n\027com.google.cloud.iot.v1B"
+ "\022DeviceManagerProtoP\001Z)cloud.google.com/"
+ "go/iot/apiv1/iotpb;iotpb\370\001\001b\006proto3"
};
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
com.google.api.FieldBehaviorProto.getDescriptor(),
com.google.api.ResourceProto.getDescriptor(),
com.google.cloud.iot.v1.ResourcesProto.getDescriptor(),
com.google.iam.v1.IamPolicyProto.getDescriptor(),
com.google.iam.v1.PolicyProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
});
internal_static_google_cloud_iot_v1_CreateDeviceRegistryRequest_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_cloud_iot_v1_CreateDeviceRegistryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_CreateDeviceRegistryRequest_descriptor,
new java.lang.String[] {
"Parent", "DeviceRegistry",
});
internal_static_google_cloud_iot_v1_GetDeviceRegistryRequest_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_cloud_iot_v1_GetDeviceRegistryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_GetDeviceRegistryRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_iot_v1_DeleteDeviceRegistryRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_cloud_iot_v1_DeleteDeviceRegistryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_DeleteDeviceRegistryRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_iot_v1_UpdateDeviceRegistryRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_cloud_iot_v1_UpdateDeviceRegistryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_UpdateDeviceRegistryRequest_descriptor,
new java.lang.String[] {
"DeviceRegistry", "UpdateMask",
});
internal_static_google_cloud_iot_v1_ListDeviceRegistriesRequest_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_cloud_iot_v1_ListDeviceRegistriesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDeviceRegistriesRequest_descriptor,
new java.lang.String[] {
"Parent", "PageSize", "PageToken",
});
internal_static_google_cloud_iot_v1_ListDeviceRegistriesResponse_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_cloud_iot_v1_ListDeviceRegistriesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDeviceRegistriesResponse_descriptor,
new java.lang.String[] {
"DeviceRegistries", "NextPageToken",
});
internal_static_google_cloud_iot_v1_CreateDeviceRequest_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_cloud_iot_v1_CreateDeviceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_CreateDeviceRequest_descriptor,
new java.lang.String[] {
"Parent", "Device",
});
internal_static_google_cloud_iot_v1_GetDeviceRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_cloud_iot_v1_GetDeviceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_GetDeviceRequest_descriptor,
new java.lang.String[] {
"Name", "FieldMask",
});
internal_static_google_cloud_iot_v1_UpdateDeviceRequest_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_cloud_iot_v1_UpdateDeviceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_UpdateDeviceRequest_descriptor,
new java.lang.String[] {
"Device", "UpdateMask",
});
internal_static_google_cloud_iot_v1_DeleteDeviceRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_cloud_iot_v1_DeleteDeviceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_DeleteDeviceRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_google_cloud_iot_v1_ListDevicesRequest_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_cloud_iot_v1_ListDevicesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDevicesRequest_descriptor,
new java.lang.String[] {
"Parent",
"DeviceNumIds",
"DeviceIds",
"FieldMask",
"GatewayListOptions",
"PageSize",
"PageToken",
});
internal_static_google_cloud_iot_v1_GatewayListOptions_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_cloud_iot_v1_GatewayListOptions_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_GatewayListOptions_descriptor,
new java.lang.String[] {
"GatewayType", "AssociationsGatewayId", "AssociationsDeviceId", "Filter",
});
internal_static_google_cloud_iot_v1_ListDevicesResponse_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_cloud_iot_v1_ListDevicesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDevicesResponse_descriptor,
new java.lang.String[] {
"Devices", "NextPageToken",
});
internal_static_google_cloud_iot_v1_ModifyCloudToDeviceConfigRequest_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_cloud_iot_v1_ModifyCloudToDeviceConfigRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ModifyCloudToDeviceConfigRequest_descriptor,
new java.lang.String[] {
"Name", "VersionToUpdate", "BinaryData",
});
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsRequest_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsRequest_descriptor,
new java.lang.String[] {
"Name", "NumVersions",
});
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsResponse_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDeviceConfigVersionsResponse_descriptor,
new java.lang.String[] {
"DeviceConfigs",
});
internal_static_google_cloud_iot_v1_ListDeviceStatesRequest_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_google_cloud_iot_v1_ListDeviceStatesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDeviceStatesRequest_descriptor,
new java.lang.String[] {
"Name", "NumStates",
});
internal_static_google_cloud_iot_v1_ListDeviceStatesResponse_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_google_cloud_iot_v1_ListDeviceStatesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_ListDeviceStatesResponse_descriptor,
new java.lang.String[] {
"DeviceStates",
});
internal_static_google_cloud_iot_v1_SendCommandToDeviceRequest_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_google_cloud_iot_v1_SendCommandToDeviceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_SendCommandToDeviceRequest_descriptor,
new java.lang.String[] {
"Name", "BinaryData", "Subfolder",
});
internal_static_google_cloud_iot_v1_SendCommandToDeviceResponse_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_google_cloud_iot_v1_SendCommandToDeviceResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_SendCommandToDeviceResponse_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_iot_v1_BindDeviceToGatewayRequest_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_google_cloud_iot_v1_BindDeviceToGatewayRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_BindDeviceToGatewayRequest_descriptor,
new java.lang.String[] {
"Parent", "GatewayId", "DeviceId",
});
internal_static_google_cloud_iot_v1_BindDeviceToGatewayResponse_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_google_cloud_iot_v1_BindDeviceToGatewayResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_BindDeviceToGatewayResponse_descriptor,
new java.lang.String[] {});
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayRequest_descriptor =
getDescriptor().getMessageTypes().get(22);
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayRequest_descriptor,
new java.lang.String[] {
"Parent", "GatewayId", "DeviceId",
});
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayResponse_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_cloud_iot_v1_UnbindDeviceFromGatewayResponse_descriptor,
new java.lang.String[] {});
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.FieldBehaviorProto.fieldBehavior);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.methodSignature);
registry.add(com.google.api.ClientProto.oauthScopes);
registry.add(com.google.api.ResourceProto.resourceReference);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
com.google.api.FieldBehaviorProto.getDescriptor();
com.google.api.ResourceProto.getDescriptor();
com.google.cloud.iot.v1.ResourcesProto.getDescriptor();
com.google.iam.v1.IamPolicyProto.getDescriptor();
com.google.iam.v1.PolicyProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
|
openjdk/jdk8 | 36,670 | jdk/src/share/classes/java/time/temporal/ChronoField.java | /*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time.temporal;
import static java.time.temporal.ChronoUnit.DAYS;
import static java.time.temporal.ChronoUnit.ERAS;
import static java.time.temporal.ChronoUnit.FOREVER;
import static java.time.temporal.ChronoUnit.HALF_DAYS;
import static java.time.temporal.ChronoUnit.HOURS;
import static java.time.temporal.ChronoUnit.MICROS;
import static java.time.temporal.ChronoUnit.MILLIS;
import static java.time.temporal.ChronoUnit.MINUTES;
import static java.time.temporal.ChronoUnit.MONTHS;
import static java.time.temporal.ChronoUnit.NANOS;
import static java.time.temporal.ChronoUnit.SECONDS;
import static java.time.temporal.ChronoUnit.WEEKS;
import static java.time.temporal.ChronoUnit.YEARS;
import java.time.DayOfWeek;
import java.time.Instant;
import java.time.Year;
import java.time.ZoneOffset;
import java.time.chrono.ChronoLocalDate;
import java.time.chrono.Chronology;
import java.util.Locale;
import java.util.Objects;
import java.util.ResourceBundle;
import sun.util.locale.provider.LocaleProviderAdapter;
import sun.util.locale.provider.LocaleResources;
/**
* A standard set of fields.
* <p>
 * This set of fields provides field-based access to manipulate a date, time or date-time.
* The standard set of fields can be extended by implementing {@link TemporalField}.
* <p>
* These fields are intended to be applicable in multiple calendar systems.
* For example, most non-ISO calendar systems define dates as a year, month and day,
* just with slightly different rules.
* The documentation of each field explains how it operates.
*
* @implSpec
* This is a final, immutable and thread-safe enum.
*
* @since 1.8
*/
public enum ChronoField implements TemporalField {
/**
* The nano-of-second.
* <p>
* This counts the nanosecond within the second, from 0 to 999,999,999.
* This field has the same meaning for all calendar systems.
* <p>
* This field is used to represent the nano-of-second handling any fraction of the second.
* Implementations of {@code TemporalAccessor} should provide a value for this field if
* they can return a value for {@link #SECOND_OF_MINUTE}, {@link #SECOND_OF_DAY} or
* {@link #INSTANT_SECONDS} filling unknown precision with zero.
* <p>
* When this field is used for setting a value, it should set as much precision as the
* object stores, using integer division to remove excess precision.
* For example, if the {@code TemporalAccessor} stores time to millisecond precision,
* then the nano-of-second must be divided by 1,000,000 before replacing the milli-of-second.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The field is resolved in combination with {@code MILLI_OF_SECOND} and {@code MICRO_OF_SECOND}.
*/
NANO_OF_SECOND("NanoOfSecond", NANOS, SECONDS, ValueRange.of(0, 999_999_999)),
/**
* The nano-of-day.
* <p>
* This counts the nanosecond within the day, from 0 to (24 * 60 * 60 * 1,000,000,000) - 1.
* This field has the same meaning for all calendar systems.
* <p>
* This field is used to represent the nano-of-day handling any fraction of the second.
* Implementations of {@code TemporalAccessor} should provide a value for this field if
* they can return a value for {@link #SECOND_OF_DAY} filling unknown precision with zero.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The value is split to form {@code NANO_OF_SECOND}, {@code SECOND_OF_MINUTE},
* {@code MINUTE_OF_HOUR} and {@code HOUR_OF_DAY} fields.
*/
NANO_OF_DAY("NanoOfDay", NANOS, DAYS, ValueRange.of(0, 86400L * 1000_000_000L - 1)),
/**
* The micro-of-second.
* <p>
* This counts the microsecond within the second, from 0 to 999,999.
* This field has the same meaning for all calendar systems.
* <p>
* This field is used to represent the micro-of-second handling any fraction of the second.
* Implementations of {@code TemporalAccessor} should provide a value for this field if
* they can return a value for {@link #SECOND_OF_MINUTE}, {@link #SECOND_OF_DAY} or
* {@link #INSTANT_SECONDS} filling unknown precision with zero.
* <p>
* When this field is used for setting a value, it should behave in the same way as
* setting {@link #NANO_OF_SECOND} with the value multiplied by 1,000.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The field is resolved in combination with {@code MILLI_OF_SECOND} to produce
* {@code NANO_OF_SECOND}.
*/
MICRO_OF_SECOND("MicroOfSecond", MICROS, SECONDS, ValueRange.of(0, 999_999)),
/**
* The micro-of-day.
* <p>
* This counts the microsecond within the day, from 0 to (24 * 60 * 60 * 1,000,000) - 1.
* This field has the same meaning for all calendar systems.
* <p>
* This field is used to represent the micro-of-day handling any fraction of the second.
* Implementations of {@code TemporalAccessor} should provide a value for this field if
* they can return a value for {@link #SECOND_OF_DAY} filling unknown precision with zero.
* <p>
* When this field is used for setting a value, it should behave in the same way as
* setting {@link #NANO_OF_DAY} with the value multiplied by 1,000.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The value is split to form {@code MICRO_OF_SECOND}, {@code SECOND_OF_MINUTE},
* {@code MINUTE_OF_HOUR} and {@code HOUR_OF_DAY} fields.
*/
MICRO_OF_DAY("MicroOfDay", MICROS, DAYS, ValueRange.of(0, 86400L * 1000_000L - 1)),
/**
* The milli-of-second.
* <p>
* This counts the millisecond within the second, from 0 to 999.
* This field has the same meaning for all calendar systems.
* <p>
* This field is used to represent the milli-of-second handling any fraction of the second.
* Implementations of {@code TemporalAccessor} should provide a value for this field if
* they can return a value for {@link #SECOND_OF_MINUTE}, {@link #SECOND_OF_DAY} or
* {@link #INSTANT_SECONDS} filling unknown precision with zero.
* <p>
* When this field is used for setting a value, it should behave in the same way as
* setting {@link #NANO_OF_SECOND} with the value multiplied by 1,000,000.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The field is resolved in combination with {@code MICRO_OF_SECOND} to produce
* {@code NANO_OF_SECOND}.
*/
MILLI_OF_SECOND("MilliOfSecond", MILLIS, SECONDS, ValueRange.of(0, 999)),
/**
* The milli-of-day.
* <p>
* This counts the millisecond within the day, from 0 to (24 * 60 * 60 * 1,000) - 1.
* This field has the same meaning for all calendar systems.
* <p>
* This field is used to represent the milli-of-day handling any fraction of the second.
* Implementations of {@code TemporalAccessor} should provide a value for this field if
* they can return a value for {@link #SECOND_OF_DAY} filling unknown precision with zero.
* <p>
* When this field is used for setting a value, it should behave in the same way as
* setting {@link #NANO_OF_DAY} with the value multiplied by 1,000,000.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The value is split to form {@code MILLI_OF_SECOND}, {@code SECOND_OF_MINUTE},
* {@code MINUTE_OF_HOUR} and {@code HOUR_OF_DAY} fields.
*/
MILLI_OF_DAY("MilliOfDay", MILLIS, DAYS, ValueRange.of(0, 86400L * 1000L - 1)),
/**
* The second-of-minute.
* <p>
* This counts the second within the minute, from 0 to 59.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
*/
SECOND_OF_MINUTE("SecondOfMinute", SECONDS, MINUTES, ValueRange.of(0, 59), "second"),
/**
* The second-of-day.
* <p>
* This counts the second within the day, from 0 to (24 * 60 * 60) - 1.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The value is split to form {@code SECOND_OF_MINUTE}, {@code MINUTE_OF_HOUR}
* and {@code HOUR_OF_DAY} fields.
*/
SECOND_OF_DAY("SecondOfDay", SECONDS, DAYS, ValueRange.of(0, 86400L - 1)),
/**
* The minute-of-hour.
* <p>
* This counts the minute within the hour, from 0 to 59.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
*/
MINUTE_OF_HOUR("MinuteOfHour", MINUTES, HOURS, ValueRange.of(0, 59), "minute"),
/**
* The minute-of-day.
* <p>
* This counts the minute within the day, from 0 to (24 * 60) - 1.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The value is split to form {@code MINUTE_OF_HOUR} and {@code HOUR_OF_DAY} fields.
*/
MINUTE_OF_DAY("MinuteOfDay", MINUTES, DAYS, ValueRange.of(0, (24 * 60) - 1)),
/**
* The hour-of-am-pm.
* <p>
* This counts the hour within the AM/PM, from 0 to 11.
* This is the hour that would be observed on a standard 12-hour digital clock.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated from 0 to 11 in strict and smart mode.
* In lenient mode the value is not validated. It is combined with
* {@code AMPM_OF_DAY} to form {@code HOUR_OF_DAY} by multiplying
 * the {@code AMPM_OF_DAY} value by 12.
*/
HOUR_OF_AMPM("HourOfAmPm", HOURS, HALF_DAYS, ValueRange.of(0, 11)),
/**
* The clock-hour-of-am-pm.
* <p>
* This counts the hour within the AM/PM, from 1 to 12.
* This is the hour that would be observed on a standard 12-hour analog wall clock.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated from 1 to 12 in strict mode and from
* 0 to 12 in smart mode. In lenient mode the value is not validated.
* The field is converted to an {@code HOUR_OF_AMPM} with the same value,
* unless the value is 12, in which case it is converted to 0.
*/
CLOCK_HOUR_OF_AMPM("ClockHourOfAmPm", HOURS, HALF_DAYS, ValueRange.of(1, 12)),
/**
* The hour-of-day.
* <p>
* This counts the hour within the day, from 0 to 23.
* This is the hour that would be observed on a standard 24-hour digital clock.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated in strict and smart mode but not in lenient mode.
* The field is combined with {@code MINUTE_OF_HOUR}, {@code SECOND_OF_MINUTE} and
* {@code NANO_OF_SECOND} to produce a {@code LocalTime}.
* In lenient mode, any excess days are added to the parsed date, or
* made available via {@link java.time.format.DateTimeFormatter#parsedExcessDays()}.
*/
HOUR_OF_DAY("HourOfDay", HOURS, DAYS, ValueRange.of(0, 23), "hour"),
/**
* The clock-hour-of-day.
* <p>
 * This counts the hour within the day, from 1 to 24.
* This is the hour that would be observed on a 24-hour analog wall clock.
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated from 1 to 24 in strict mode and from
* 0 to 24 in smart mode. In lenient mode the value is not validated.
* The field is converted to an {@code HOUR_OF_DAY} with the same value,
* unless the value is 24, in which case it is converted to 0.
*/
CLOCK_HOUR_OF_DAY("ClockHourOfDay", HOURS, DAYS, ValueRange.of(1, 24)),
/**
* The am-pm-of-day.
* <p>
* This counts the AM/PM within the day, from 0 (AM) to 1 (PM).
* This field has the same meaning for all calendar systems.
* <p>
* When parsing this field it behaves equivalent to the following:
* The value is validated from 0 to 1 in strict and smart mode.
* In lenient mode the value is not validated. It is combined with
* {@code HOUR_OF_AMPM} to form {@code HOUR_OF_DAY} by multiplying
 * the {@code AMPM_OF_DAY} value by 12.
*/
AMPM_OF_DAY("AmPmOfDay", HALF_DAYS, DAYS, ValueRange.of(0, 1), "dayperiod"),
/**
* The day-of-week, such as Tuesday.
* <p>
* This represents the standard concept of the day of the week.
* In the default ISO calendar system, this has values from Monday (1) to Sunday (7).
* The {@link DayOfWeek} class can be used to interpret the result.
* <p>
* Most non-ISO calendar systems also define a seven day week that aligns with ISO.
* Those calendar systems must also use the same numbering system, from Monday (1) to
* Sunday (7), which allows {@code DayOfWeek} to be used.
* <p>
* Calendar systems that do not have a standard seven day week should implement this field
* if they have a similar concept of named or numbered days within a period similar
* to a week. It is recommended that the numbering starts from 1.
*/
DAY_OF_WEEK("DayOfWeek", DAYS, WEEKS, ValueRange.of(1, 7), "weekday"),
/**
* The aligned day-of-week within a month.
* <p>
 * This represents the concept of the count of days within the period of a week
* where the weeks are aligned to the start of the month.
* This field is typically used with {@link #ALIGNED_WEEK_OF_MONTH}.
* <p>
 * For example, in a calendar system with a seven day week, the first aligned-week-of-month
* starts on day-of-month 1, the second aligned-week starts on day-of-month 8, and so on.
* Within each of these aligned-weeks, the days are numbered from 1 to 7 and returned
* as the value of this field.
* As such, day-of-month 1 to 7 will have aligned-day-of-week values from 1 to 7.
* And day-of-month 8 to 14 will repeat this with aligned-day-of-week values from 1 to 7.
* <p>
* Calendar systems that do not have a seven day week should typically implement this
* field in the same way, but using the alternate week length.
*/
ALIGNED_DAY_OF_WEEK_IN_MONTH("AlignedDayOfWeekInMonth", DAYS, WEEKS, ValueRange.of(1, 7)),
/**
* The aligned day-of-week within a year.
* <p>
 * This represents the concept of the count of days within the period of a week
* where the weeks are aligned to the start of the year.
* This field is typically used with {@link #ALIGNED_WEEK_OF_YEAR}.
* <p>
 * For example, in a calendar system with a seven day week, the first aligned-week-of-year
* starts on day-of-year 1, the second aligned-week starts on day-of-year 8, and so on.
* Within each of these aligned-weeks, the days are numbered from 1 to 7 and returned
* as the value of this field.
* As such, day-of-year 1 to 7 will have aligned-day-of-week values from 1 to 7.
* And day-of-year 8 to 14 will repeat this with aligned-day-of-week values from 1 to 7.
* <p>
* Calendar systems that do not have a seven day week should typically implement this
* field in the same way, but using the alternate week length.
*/
ALIGNED_DAY_OF_WEEK_IN_YEAR("AlignedDayOfWeekInYear", DAYS, WEEKS, ValueRange.of(1, 7)),
/**
* The day-of-month.
* <p>
* This represents the concept of the day within the month.
* In the default ISO calendar system, this has values from 1 to 31 in most months.
* April, June, September, November have days from 1 to 30, while February has days
* from 1 to 28, or 29 in a leap year.
* <p>
* Non-ISO calendar systems should implement this field using the most recognized
* day-of-month values for users of the calendar system.
* Normally, this is a count of days from 1 to the length of the month.
*/
DAY_OF_MONTH("DayOfMonth", DAYS, MONTHS, ValueRange.of(1, 28, 31), "day"),
/**
* The day-of-year.
* <p>
* This represents the concept of the day within the year.
* In the default ISO calendar system, this has values from 1 to 365 in standard
* years and 1 to 366 in leap years.
* <p>
* Non-ISO calendar systems should implement this field using the most recognized
* day-of-year values for users of the calendar system.
* Normally, this is a count of days from 1 to the length of the year.
* <p>
* Note that a non-ISO calendar system may have year numbering system that changes
* at a different point to the natural reset in the month numbering. An example
* of this is the Japanese calendar system where a change of era, which resets
* the year number to 1, can happen on any date. The era and year reset also cause
* the day-of-year to be reset to 1, but not the month-of-year or day-of-month.
*/
DAY_OF_YEAR("DayOfYear", DAYS, YEARS, ValueRange.of(1, 365, 366)),
/**
* The epoch-day, based on the Java epoch of 1970-01-01 (ISO).
* <p>
* This field is the sequential count of days where 1970-01-01 (ISO) is zero.
* Note that this uses the <i>local</i> time-line, ignoring offset and time-zone.
* <p>
* This field is strictly defined to have the same meaning in all calendar systems.
* This is necessary to ensure interoperation between calendars.
*/
EPOCH_DAY("EpochDay", DAYS, FOREVER, ValueRange.of((long) (Year.MIN_VALUE * 365.25), (long) (Year.MAX_VALUE * 365.25))),
/**
* The aligned week within a month.
* <p>
 * This represents the concept of the count of weeks within the period of a month
* where the weeks are aligned to the start of the month.
* This field is typically used with {@link #ALIGNED_DAY_OF_WEEK_IN_MONTH}.
* <p>
 * For example, in a calendar system with a seven day week, the first aligned-week-of-month
* starts on day-of-month 1, the second aligned-week starts on day-of-month 8, and so on.
* Thus, day-of-month values 1 to 7 are in aligned-week 1, while day-of-month values
* 8 to 14 are in aligned-week 2, and so on.
* <p>
* Calendar systems that do not have a seven day week should typically implement this
* field in the same way, but using the alternate week length.
*/
ALIGNED_WEEK_OF_MONTH("AlignedWeekOfMonth", WEEKS, MONTHS, ValueRange.of(1, 4, 5)),
/**
* The aligned week within a year.
* <p>
 * This represents the concept of the count of weeks within the period of a year
* where the weeks are aligned to the start of the year.
* This field is typically used with {@link #ALIGNED_DAY_OF_WEEK_IN_YEAR}.
* <p>
 * For example, in a calendar system with a seven day week, the first aligned-week-of-year
* starts on day-of-year 1, the second aligned-week starts on day-of-year 8, and so on.
* Thus, day-of-year values 1 to 7 are in aligned-week 1, while day-of-year values
* 8 to 14 are in aligned-week 2, and so on.
* <p>
* Calendar systems that do not have a seven day week should typically implement this
* field in the same way, but using the alternate week length.
*/
ALIGNED_WEEK_OF_YEAR("AlignedWeekOfYear", WEEKS, YEARS, ValueRange.of(1, 53)),
/**
* The month-of-year, such as March.
* <p>
* This represents the concept of the month within the year.
* In the default ISO calendar system, this has values from January (1) to December (12).
* <p>
* Non-ISO calendar systems should implement this field using the most recognized
* month-of-year values for users of the calendar system.
* Normally, this is a count of months starting from 1.
*/
MONTH_OF_YEAR("MonthOfYear", MONTHS, YEARS, ValueRange.of(1, 12), "month"),
/**
 * The proleptic-month, counting months sequentially from year 0.
* <p>
* This field is the sequential count of months where the first month
* in proleptic-year zero has the value zero.
* Later months have increasingly larger values.
 * Earlier months have increasingly smaller values.
* There are no gaps or breaks in the sequence of months.
* Note that this uses the <i>local</i> time-line, ignoring offset and time-zone.
* <p>
* In the default ISO calendar system, June 2012 would have the value
* {@code (2012 * 12 + 6 - 1)}. This field is primarily for internal use.
* <p>
* Non-ISO calendar systems must implement this field as per the definition above.
* It is just a simple zero-based count of elapsed months from the start of proleptic-year 0.
* All calendar systems with a full proleptic-year definition will have a year zero.
* If the calendar system has a minimum year that excludes year zero, then one must
* be extrapolated in order for this method to be defined.
*/
PROLEPTIC_MONTH("ProlepticMonth", MONTHS, FOREVER, ValueRange.of(Year.MIN_VALUE * 12L, Year.MAX_VALUE * 12L + 11)),
/**
* The year within the era.
* <p>
* This represents the concept of the year within the era.
* This field is typically used with {@link #ERA}.
* <p>
* The standard mental model for a date is based on three concepts - year, month and day.
* These map onto the {@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
* Note that there is no reference to eras.
* The full model for a date requires four concepts - era, year, month and day. These map onto
* the {@code ERA}, {@code YEAR_OF_ERA}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
* Whether this field or {@code YEAR} is used depends on which mental model is being used.
* See {@link ChronoLocalDate} for more discussion on this topic.
* <p>
* In the default ISO calendar system, there are two eras defined, 'BCE' and 'CE'.
* The era 'CE' is the one currently in use and year-of-era runs from 1 to the maximum value.
* The era 'BCE' is the previous era, and the year-of-era runs backwards.
* <p>
 * For example, subtracting a year each time yields the following:<br>
* - year-proleptic 2 = 'CE' year-of-era 2<br>
* - year-proleptic 1 = 'CE' year-of-era 1<br>
* - year-proleptic 0 = 'BCE' year-of-era 1<br>
* - year-proleptic -1 = 'BCE' year-of-era 2<br>
* <p>
* Note that the ISO-8601 standard does not actually define eras.
* Note also that the ISO eras do not align with the well-known AD/BC eras due to the
* change between the Julian and Gregorian calendar systems.
* <p>
* Non-ISO calendar systems should implement this field using the most recognized
* year-of-era value for users of the calendar system.
* Since most calendar systems have only two eras, the year-of-era numbering approach
* will typically be the same as that used by the ISO calendar system.
* The year-of-era value should typically always be positive, however this is not required.
*/
YEAR_OF_ERA("YearOfEra", YEARS, FOREVER, ValueRange.of(1, Year.MAX_VALUE, Year.MAX_VALUE + 1)),
/**
* The proleptic year, such as 2012.
* <p>
* This represents the concept of the year, counting sequentially and using negative numbers.
* The proleptic year is not interpreted in terms of the era.
* See {@link #YEAR_OF_ERA} for an example showing the mapping from proleptic year to year-of-era.
* <p>
* The standard mental model for a date is based on three concepts - year, month and day.
* These map onto the {@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
* Note that there is no reference to eras.
* The full model for a date requires four concepts - era, year, month and day. These map onto
* the {@code ERA}, {@code YEAR_OF_ERA}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} fields.
* Whether this field or {@code YEAR_OF_ERA} is used depends on which mental model is being used.
* See {@link ChronoLocalDate} for more discussion on this topic.
* <p>
* Non-ISO calendar systems should implement this field as follows.
* If the calendar system has only two eras, before and after a fixed date, then the
* proleptic-year value must be the same as the year-of-era value for the later era,
* and increasingly negative for the earlier era.
* If the calendar system has more than two eras, then the proleptic-year value may be
* defined with any appropriate value, although defining it to be the same as ISO may be
* the best option.
*/
YEAR("Year", YEARS, FOREVER, ValueRange.of(Year.MIN_VALUE, Year.MAX_VALUE), "year"),
/**
* The era.
* <p>
* This represents the concept of the era, which is the largest division of the time-line.
* This field is typically used with {@link #YEAR_OF_ERA}.
* <p>
* In the default ISO calendar system, there are two eras defined, 'BCE' and 'CE'.
* The era 'CE' is the one currently in use and year-of-era runs from 1 to the maximum value.
* The era 'BCE' is the previous era, and the year-of-era runs backwards.
* See {@link #YEAR_OF_ERA} for a full example.
* <p>
* Non-ISO calendar systems should implement this field to define eras.
* The value of the era that was active on 1970-01-01 (ISO) must be assigned the value 1.
* Earlier eras must have sequentially smaller values.
* Later eras must have sequentially larger values.
*/
ERA("Era", ERAS, FOREVER, ValueRange.of(0, 1), "era"),
/**
* The instant epoch-seconds.
* <p>
* This represents the concept of the sequential count of seconds where
* 1970-01-01T00:00Z (ISO) is zero.
* This field may be used with {@link #NANO_OF_SECOND} to represent the fraction of the second.
* <p>
* An {@link Instant} represents an instantaneous point on the time-line.
* On their own, an instant has insufficient information to allow a local date-time to be obtained.
* Only when paired with an offset or time-zone can the local date or time be calculated.
* <p>
* This field is strictly defined to have the same meaning in all calendar systems.
* This is necessary to ensure interoperation between calendars.
*/
INSTANT_SECONDS("InstantSeconds", SECONDS, FOREVER, ValueRange.of(Long.MIN_VALUE, Long.MAX_VALUE)),
/**
* The offset from UTC/Greenwich.
* <p>
* This represents the concept of the offset in seconds of local time from UTC/Greenwich.
* <p>
* A {@link ZoneOffset} represents the period of time that local time differs from UTC/Greenwich.
* This is usually a fixed number of hours and minutes.
* It is equivalent to the {@link ZoneOffset#getTotalSeconds() total amount} of the offset in seconds.
* For example, during the winter Paris has an offset of {@code +01:00}, which is 3600 seconds.
* <p>
* This field is strictly defined to have the same meaning in all calendar systems.
* This is necessary to ensure interoperation between calendars.
*/
OFFSET_SECONDS("OffsetSeconds", SECONDS, FOREVER, ValueRange.of(-18 * 3600, 18 * 3600));
private final String name;
private final TemporalUnit baseUnit;
private final TemporalUnit rangeUnit;
private final ValueRange range;
private final String displayNameKey;
/**
 * Creates a field that has no display-name resource key; {@link #getDisplayName(Locale)}
 * will fall back to the field's standard name.
 */
private ChronoField(String name, TemporalUnit baseUnit, TemporalUnit rangeUnit, ValueRange range) {
    this(name, baseUnit, rangeUnit, range, null);
}
/**
 * Creates a field.
 *
 * @param name  the standard name of the field, also returned by {@link #toString()}
 * @param baseUnit  the unit the field is measured in
 * @param rangeUnit  the unit the field is bound by
 * @param range  the range of valid values in the ISO-8601 calendar system
 * @param displayNameKey  the resource-bundle key suffix used to look up the
 *        localized display name, or null if the field has no localized name
 */
private ChronoField(String name, TemporalUnit baseUnit, TemporalUnit rangeUnit,
ValueRange range, String displayNameKey) {
this.name = name;
this.baseUnit = baseUnit;
this.rangeUnit = rangeUnit;
this.range = range;
this.displayNameKey = displayNameKey;
}
/**
 * Returns the localized display name of this field for the given locale,
 * falling back to the field's standard name when no translation resource
 * exists (or when the field defines no display-name key at all).
 */
@Override
public String getDisplayName(Locale locale) {
    Objects.requireNonNull(locale, "locale");
    if (displayNameKey == null) {
        return name;
    }
    ResourceBundle bundle = LocaleProviderAdapter.getResourceBundleBased()
            .getLocaleResources(locale)
            .getJavaTimeFormatData();
    String resourceKey = "field." + displayNameKey;
    return bundle.containsKey(resourceKey) ? bundle.getString(resourceKey) : name;
}
/**
 * Gets the unit that the field is measured in.
 *
 * @return the unit defining the base unit of the field, not null
 */
@Override
public TemporalUnit getBaseUnit() {
return baseUnit;
}
/**
 * Gets the range that the field is bound by.
 *
 * @return the unit defining the range of the field, not null
 */
@Override
public TemporalUnit getRangeUnit() {
return rangeUnit;
}
/**
 * Gets the range of valid values for the field.
 * <p>
 * All fields can be expressed as a {@code long} integer.
 * This method returns an object that describes the valid range for that value.
 * <p>
 * This method returns the range of the field in the ISO-8601 calendar system.
 * This range may be incorrect for other calendar systems.
 * Use {@link Chronology#range(ChronoField)} to access the correct range
 * for a different calendar system.
 * <p>
 * Note that the result only describes the minimum and maximum valid values
 * and it is important not to read too much into them. For example, there
 * could be values within the range that are invalid for the field.
 *
 * @return the range of valid values for the field, not null
 */
@Override
public ValueRange range() {
// Fixed ISO-8601 range assigned at construction time; calendar-specific
// ranges are obtained via Chronology.range(field) instead.
return range;
}
//-----------------------------------------------------------------------
/**
 * Checks if this field represents a component of a date.
 * <p>
 * Fields from day-of-week to era are date-based.
 *
 * @return true if it is a component of a date
 */
@Override
public boolean isDateBased() {
    // The date-based fields are declared contiguously in this enum, from
    // DAY_OF_WEEK up to and including ERA; enum compareTo is ordinal-based.
    return compareTo(DAY_OF_WEEK) >= 0 && compareTo(ERA) <= 0;
}
/**
 * Checks if this field represents a component of a time.
 * <p>
 * Fields from nano-of-second to am-pm-of-day are time-based.
 *
 * @return true if it is a component of a time
 */
@Override
public boolean isTimeBased() {
    // All time-based fields are declared before DAY_OF_WEEK in this enum;
    // enum compareTo is ordinal-based, so this matches the original ordinal check.
    return compareTo(DAY_OF_WEEK) < 0;
}
//-----------------------------------------------------------------------
/**
 * Checks that the specified value is valid for this field, returning it unchanged.
 * <p>
 * The check is performed against the outer bounds reported by {@link #range()},
 * which describes the ISO-8601 calendar system only. For other calendar systems
 * obtain the correct range via {@link Chronology#range(ChronoField)}.
 *
 * @param value the value to check
 * @return the value that was passed in
 */
public long checkValidValue(long value) {
    ValueRange isoRange = range();
    return isoRange.checkValidValue(value, this);
}
/**
 * Checks that the specified value is valid for this field and fits in an
 * {@code int}, returning it narrowed to {@code int}.
 * <p>
 * The check is performed against the outer bounds reported by {@link #range()},
 * which describes the ISO-8601 calendar system only. For other calendar systems
 * obtain the correct range via {@link Chronology#range(ChronoField)}.
 *
 * @param value the value to check
 * @return the value that was passed in
 */
public int checkValidIntValue(long value) {
    ValueRange isoRange = range();
    return isoRange.checkValidIntValue(value, this);
}
//-----------------------------------------------------------------------
/**
 * Checks if this field is supported by the temporal object.
 * Delegates to the temporal object, as only it knows which fields it supports.
 */
@Override
public boolean isSupportedBy(TemporalAccessor temporal) {
return temporal.isSupported(this);
}
/**
 * Gets the range of valid values for this field refined by the temporal object.
 * Delegates to the temporal object, which may narrow the generic ISO range.
 */
@Override
public ValueRange rangeRefinedBy(TemporalAccessor temporal) {
return temporal.range(this);
}
/**
 * Gets the value of this field from the specified temporal object,
 * delegating to {@code TemporalAccessor.getLong}.
 */
@Override
public long getFrom(TemporalAccessor temporal) {
return temporal.getLong(this);
}
/**
 * Returns a copy of the temporal object with this field set to the new value,
 * delegating to {@code Temporal.with}.
 */
@SuppressWarnings("unchecked")
@Override
public <R extends Temporal> R adjustInto(R temporal, long newValue) {
// Unchecked cast relies on the Temporal.with contract that the returned
// object has the same concrete type as the receiver.
return (R) temporal.with(this, newValue);
}
//-----------------------------------------------------------------------
/**
 * Returns the standard name of the field, e.g. "Year" or "DayOfMonth".
 */
@Override
public String toString() {
return name;
}
}
|
apache/flink | 36,427 | flink-core/src/test/java/org/apache/flink/api/common/io/FileInputFormatTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.common.io;
import org.apache.flink.api.common.io.FileInputFormat.FileBaseStatistics;
import org.apache.flink.api.common.io.statistics.BaseStatistics;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.fs.FSDataInputStream;
import org.apache.flink.core.fs.FileInputSplit;
import org.apache.flink.core.fs.FileStatus;
import org.apache.flink.core.fs.FileSystem;
import org.apache.flink.core.fs.Path;
import org.apache.flink.testutils.TestFileUtils;
import org.apache.flink.testutils.junit.utils.TempDirUtils;
import org.apache.flink.types.IntValue;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Fail.fail;
/** Tests for the FileInputFormat */
class FileInputFormatTest {
@TempDir private java.nio.file.Path temporaryFolder;
@Test
void testGetPathWithoutSettingFirst() {
    // A freshly created format has no configured input path.
    final DummyFileInputFormat inputFormat = new DummyFileInputFormat();
    assertThat(inputFormat.getFilePaths()).as("Path should be null.").isEmpty();
}
@Test
void testGetPathsWithoutSettingFirst() {
    // getFilePaths() must return an empty, non-null array before any path is set.
    final Path[] configuredPaths = new DummyFileInputFormat().getFilePaths();
    assertThat(configuredPaths).as("Paths should not be null.").isNotNull();
    assertThat(configuredPaths).as("Paths should be empty.").isEmpty();
}
@Test
void testToStringWithoutPathSet() {
    // Without a configured path, toString() uses the "unknown file" placeholder.
    assertThat(new DummyFileInputFormat().toString())
            .as("The toString() should be correct.")
            .isEqualTo("File Input (unknown file)");
}
@Test
void testSetPathsNull() {
    // A null varargs path must be rejected.
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    assertThatThrownBy(() -> format.setFilePaths((String) null))
            .isInstanceOf(IllegalArgumentException.class);
}
@Test
void testSetPathNullString() {
    // A null String path must be rejected.
    final DummyFileInputFormat format = new DummyFileInputFormat();
    assertThatThrownBy(() -> format.setFilePath((String) null))
            .isInstanceOf(IllegalArgumentException.class);
}
@Test
void testSetPathNullPath() {
    // A null Path object must be rejected as well.
    final DummyFileInputFormat format = new DummyFileInputFormat();
    assertThatThrownBy(() -> format.setFilePath((Path) null))
            .isInstanceOf(IllegalArgumentException.class);
}
@Test
void testSetPathsOnePathNull() {
    // Every entry of a multi-path configuration must be non-null.
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    assertThatThrownBy(() -> format.setFilePaths("/an/imaginary/path", null))
            .isInstanceOf(IllegalArgumentException.class);
}
@Test
void testSetPathsEmptyArray() {
    // At least one path must be supplied.
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    assertThatThrownBy(() -> format.setFilePaths(new String[0]))
            .isInstanceOf(IllegalArgumentException.class);
}
@Test
void testSetPath() {
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath("/some/imaginary/path");
    // Fix: AssertJ reads assertThat(actual).isEqualTo(expected); the original had the
    // arguments swapped, which produces misleading failure messages.
    assertThat(format.getFilePaths()[0].toString()).isEqualTo("/some/imaginary/path");
}
@Test
void testSetPathOnMulti() {
    // setFilePath (singular) on a multi-path format configures exactly one path.
    final String myPath = "/an/imaginary/path";
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    format.setFilePath(myPath);
    final Path[] configured = format.getFilePaths();
    assertThat(configured).hasSize(1);
    assertThat(configured[0].toUri().toString()).isEqualTo(myPath);
    // ensure backwards compatibility
    assertThat(format.getFilePaths()[0].toUri().toString()).isEqualTo(myPath);
}
@Test
void testSetPathsSingleWithMulti() {
    // setFilePaths with a single argument behaves like setFilePath.
    final String myPath = "/an/imaginary/path";
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    format.setFilePaths(myPath);
    final Path[] configured = format.getFilePaths();
    assertThat(configured).hasSize(1);
    assertThat(configured[0].toUri().toString()).isEqualTo(myPath);
    // ensure backwards compatibility
    assertThat(format.getFilePaths()[0].toUri().toString()).isEqualTo(myPath);
}
@Test
void testSetPathsMulti() {
    // Multiple paths are stored in the order they were configured.
    final String[] expected = {"/an/imaginary/path", "/an/imaginary/path2"};
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    format.setFilePaths(expected[0], expected[1]);
    final Path[] configured = format.getFilePaths();
    assertThat(configured).hasSize(2);
    for (int i = 0; i < expected.length; i++) {
        assertThat(configured[i].toUri().toString()).isEqualTo(expected[i]);
    }
}
@Test
void testSetFileViaConfiguration() {
    // The input path can also be injected through the "input.file.path" config key.
    final String filePath = "file:///some/none/existing/directory/";
    final Configuration conf = new Configuration();
    conf.setString("input.file.path", filePath);
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.configure(conf);
    assertThat(format.getFilePaths()[0]).isEqualTo(new Path(filePath));
}
@Test
void testSetFileViaConfigurationEmptyPath() {
    // Configuring a null path must fail during configuration.
    assertThatThrownBy(
                    () -> {
                        final Configuration conf = new Configuration();
                        conf.setString("input.file.path", (String) null);
                        new DummyFileInputFormat().configure(conf);
                    })
            .isInstanceOf(RuntimeException.class);
}
// ------------------------------------------------------------------------
// Input Splits
// ------------------------------------------------------------------------
@Test
void testCreateInputSplitSingleFile() throws IOException {
    // One file split into two splits: both splits reference the same file.
    final String tempFile = TestFileUtils.createTempFile("Hello World");
    final FileInputFormat<IntValue> fif = new DummyFileInputFormat();
    fif.setFilePath(tempFile);
    fif.configure(new Configuration());
    final FileInputSplit[] splits = fif.createInputSplits(2);
    assertThat(splits).hasSize(2);
    for (FileInputSplit split : splits) {
        assertThat(split.getPath().toString()).isEqualTo(tempFile);
    }
}
@Test
void testCreateInputSplitMultiFiles() throws IOException {
// Three files of distinct, known sizes so each split can be attributed to its file.
String tempFile1 = TestFileUtils.createTempFile(21);
String tempFile2 = TestFileUtils.createTempFile(22);
String tempFile3 = TestFileUtils.createTempFile(23);
FileInputFormat<IntValue> fif = new MultiDummyFileInputFormat();
fif.setFilePaths(tempFile1, tempFile2, tempFile3);
fif.configure(new Configuration());
FileInputSplit[] splits = fif.createInputSplits(3);
int numSplitsFile1 = 0;
int numSplitsFile2 = 0;
int numSplitsFile3 = 0;
assertThat(splits).hasSize(3);
for (FileInputSplit fis : splits) {
// With one split per file, every split starts at offset 0 and spans the whole file.
assertThat(fis.getStart()).isZero();
if (fis.getPath().toString().equals(tempFile1)) {
numSplitsFile1++;
assertThat(fis.getLength()).isEqualTo(21);
} else if (fis.getPath().toString().equals(tempFile2)) {
numSplitsFile2++;
assertThat(fis.getLength()).isEqualTo(22);
} else if (fis.getPath().toString().equals(tempFile3)) {
numSplitsFile3++;
assertThat(fis.getLength()).isEqualTo(23);
} else {
fail("Got split for unknown file.");
}
}
// Each of the three files must be covered by exactly one split.
assertThat(numSplitsFile1).isOne();
assertThat(numSplitsFile2).isOne();
assertThat(numSplitsFile3).isOne();
}
// ------------------------------------------------------------------------
// Statistics
// ------------------------------------------------------------------------
@Test
void testGetStatisticsNonExistingFile() throws IOException {
    // Statistics for a missing path cannot be computed and must be null.
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath("file:///some/none/existing/directory/");
    format.configure(new Configuration());
    assertThat(format.getStatistics(null)).as("The file statistics should be null.").isNull();
}
@Test
void testGetStatisticsOneFileNoCachedVersion() throws IOException {
    // Without cached statistics, the reported size equals the file's actual size.
    final long size = 1024 * 500;
    final String tempFile = TestFileUtils.createTempFile(size);
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath(tempFile);
    format.configure(new Configuration());
    final BaseStatistics stats = format.getStatistics(null);
    assertThat(stats.getTotalInputSize())
            .as("The file size from the statistics is wrong.")
            .isEqualTo(size);
}
@Test
void testGetStatisticsMultipleFilesNoCachedVersion() throws IOException {
    // A directory path aggregates the sizes of all files it contains.
    final long size1 = 2077;
    final long size2 = 31909;
    final long size3 = 10;
    final String tempDir =
            TestFileUtils.createTempFileDir(
                    TempDirUtils.newFolder(temporaryFolder), size1, size2, size3);
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath(tempDir);
    format.configure(new Configuration());
    final BaseStatistics stats = format.getStatistics(null);
    assertThat(stats.getTotalInputSize())
            .as("The file size from the statistics is wrong.")
            .isEqualTo(size1 + size2 + size3);
}
@Test
void testGetStatisticsOneFileWithCachedVersion() throws IOException {
final long SIZE = 50873;
final long FAKE_SIZE = 10065;
String tempFile = TestFileUtils.createTempFile(SIZE);
DummyFileInputFormat format = new DummyFileInputFormat();
format.setFilePath(tempFile);
format.configure(new Configuration());
// first query: no cached stats, so the accurate size must be gathered
FileBaseStatistics stats = format.getStatistics(null);
assertThat(stats.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(SIZE);
// querying again with up-to-date cached stats must return the very same object
format = new DummyFileInputFormat();
format.setFilePath(tempFile);
format.configure(new Configuration());
FileBaseStatistics newStats = format.getStatistics(stats);
assertThat(stats).as("Statistics object was changed").isSameAs(newStats);
// insert fake stats with the correct modification time. the call should return the fake
// stats
format = new DummyFileInputFormat();
format.setFilePath(tempFile);
format.configure(new Configuration());
FileBaseStatistics fakeStats =
new FileBaseStatistics(
stats.getLastModificationTime(),
FAKE_SIZE,
BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
BaseStatistics latest = format.getStatistics(fakeStats);
assertThat(latest.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(FAKE_SIZE);
// insert fake stats with the expired modification time. the call should return new
// accurate stats
format = new DummyFileInputFormat();
format.setFilePath(tempFile);
format.configure(new Configuration());
FileBaseStatistics outDatedFakeStats =
new FileBaseStatistics(
stats.getLastModificationTime() - 1,
FAKE_SIZE,
BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
BaseStatistics reGathered = format.getStatistics(outDatedFakeStats);
assertThat(reGathered.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(SIZE);
}
@Test
void testGetStatisticsMultipleFilesWithCachedVersion() throws IOException {
FileSystem fs = FileSystem.getLocalFileSystem();
final long SIZE1 = 2077;
final long SIZE2 = 31909;
final long SIZE3 = 10;
final long TOTAL = SIZE1 + SIZE2 + SIZE3;
final long FAKE_SIZE = 10065;
File tempDirFile = TempDirUtils.newFolder(temporaryFolder);
String tempDir = tempDirFile.getAbsolutePath();
// record the modification time of each file to later construct outdated cached stats
String f1 = TestFileUtils.createTempFileInDirectory(tempDir, SIZE1);
long modTime1 = fs.getFileStatus(new Path(f1)).getModificationTime();
String f2 = TestFileUtils.createTempFileInDirectory(tempDir, SIZE2);
long modTime2 = fs.getFileStatus(new Path(f2)).getModificationTime();
String f3 = TestFileUtils.createTempFileInDirectory(tempDir, SIZE3);
long modTime3 = fs.getFileStatus(new Path(f3)).getModificationTime();
DummyFileInputFormat format = new DummyFileInputFormat();
format.setFilePath(tempDir);
format.configure(new Configuration());
FileBaseStatistics stats = format.getStatistics(null);
assertThat(stats.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(TOTAL);
// querying again with up-to-date cached stats must return the very same object
format = new DummyFileInputFormat();
format.setFilePath(tempDir);
format.configure(new Configuration());
FileBaseStatistics newStats = format.getStatistics(stats);
assertThat(stats).as("Statistics object was changed").isSameAs(newStats);
// insert fake stats with the correct modification time. the call should return the fake
// stats
format = new DummyFileInputFormat();
format.setFilePath(tempDir);
format.configure(new Configuration());
FileBaseStatistics fakeStats =
new FileBaseStatistics(
stats.getLastModificationTime(),
FAKE_SIZE,
BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
BaseStatistics latest = format.getStatistics(fakeStats);
assertThat(latest.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(FAKE_SIZE);
// insert fake stats with an outdated modification time. the call should ignore the
// fake stats and re-gather accurate ones
format = new DummyFileInputFormat();
format.setFilePath(tempDir);
format.configure(new Configuration());
FileBaseStatistics outDatedFakeStats =
new FileBaseStatistics(
Math.min(Math.min(modTime1, modTime2), modTime3) - 1,
FAKE_SIZE,
BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
BaseStatistics reGathered = format.getStatistics(outDatedFakeStats);
assertThat(reGathered.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(TOTAL);
}
// -- Multiple Files -- //
@Test
void testGetStatisticsMultipleNonExistingFile() throws IOException {
    // If none of the configured paths exist, no statistics can be gathered.
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    format.setFilePaths(
            "file:///some/none/existing/directory/", "file:///another/non/existing/directory/");
    format.configure(new Configuration());
    assertThat(format.getStatistics(null)).as("The file statistics should be null.").isNull();
}
@Test
void testGetStatisticsMultipleOneFileNoCachedVersion() throws IOException {
    // Total input size over several paths is the sum of the individual file sizes.
    final long size1 = 1024 * 500;
    final long size2 = 1024 * 505;
    final String tempFile = TestFileUtils.createTempFile(size1);
    final String tempFile2 = TestFileUtils.createTempFile(size2);
    final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
    format.setFilePaths(tempFile, tempFile2);
    format.configure(new Configuration());
    final BaseStatistics stats = format.getStatistics(null);
    assertThat(stats.getTotalInputSize())
            .as("The file size from the statistics is wrong.")
            .isEqualTo(size1 + size2);
}
@Test
void testGetStatisticsMultipleFilesMultiplePathsNoCachedVersion() throws IOException {
// Two directories, three files each: the statistics must sum all six files.
final long size1 = 2077;
final long size2 = 31909;
final long size3 = 10;
final long totalSize123 = size1 + size2 + size3;
String tempDir =
TestFileUtils.createTempFileDir(
TempDirUtils.newFolder(temporaryFolder), size1, size2, size3);
final long size4 = 2051;
final long size5 = 31902;
final long size6 = 15;
final long totalSize456 = size4 + size5 + size6;
String tempDir2 =
TestFileUtils.createTempFileDir(
TempDirUtils.newFolder(temporaryFolder), size4, size5, size6);
final MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
format.setFilePaths(tempDir, tempDir2);
format.configure(new Configuration());
BaseStatistics stats = format.getStatistics(null);
assertThat(stats.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(totalSize123 + totalSize456);
}
@Test
void testGetStatisticsMultipleOneFileWithCachedVersion() throws IOException {
FileSystem fs = FileSystem.getLocalFileSystem();
final long size1 = 50873;
final long fakeSize = 10065;
String tempFile1 = TestFileUtils.createTempFile(size1);
final long lastModTime1 = fs.getFileStatus(new Path(tempFile1)).getModificationTime();
final long size2 = 52573;
String tempFile2 = TestFileUtils.createTempFile(size2);
final long lastModTime2 = fs.getFileStatus(new Path(tempFile2)).getModificationTime();
final long sizeTotal = size1 + size2;
MultiDummyFileInputFormat format = new MultiDummyFileInputFormat();
format.setFilePaths(tempFile1, tempFile2);
format.configure(new Configuration());
FileBaseStatistics stats = format.getStatistics(null);
assertThat(stats.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(sizeTotal);
// NOTE(review): the re-query below configures only tempFile1 while passing in the
// cached stats gathered over both files — presumably intentional, since the cache
// is considered valid as long as modification times match; confirm against
// FileInputFormat.getStatistics.
format = new MultiDummyFileInputFormat();
format.setFilePath(tempFile1);
format.configure(new Configuration());
FileBaseStatistics newStats = format.getStatistics(stats);
assertThat(stats).as("Statistics object was changed").isSameAs(newStats);
// insert fake stats with the correct modification time. the call should return the fake
// stats
format = new MultiDummyFileInputFormat();
format.setFilePath(tempFile1);
format.configure(new Configuration());
FileBaseStatistics fakeStats =
new FileBaseStatistics(
stats.getLastModificationTime(),
fakeSize,
BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
BaseStatistics latest = format.getStatistics(fakeStats);
assertThat(latest.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(fakeSize);
// insert fake stats with the expired modification time. the call should return new accurate
// stats
format = new MultiDummyFileInputFormat();
format.setFilePaths(tempFile1, tempFile2);
format.configure(new Configuration());
FileBaseStatistics outDatedFakeStats =
new FileBaseStatistics(
Math.min(lastModTime1, lastModTime2) - 1,
fakeSize,
BaseStatistics.AVG_RECORD_BYTES_UNKNOWN);
BaseStatistics reGathered = format.getStatistics(outDatedFakeStats);
assertThat(reGathered.getTotalInputSize())
.as("The file size from the statistics is wrong.")
.isEqualTo(sizeTotal);
}
// ------------------------------------------------------------------------
// Unsplittable input files
// ------------------------------------------------------------------------
// ---- Tests for compressed files ---------
/**
 * Create directory with compressed files and see if it creates a split for each file. Each
 * split has to start from the beginning.
 */
@Test
void testFileInputFormatWithCompression() throws IOException {
    String tempFile =
            TestFileUtils.createTempFileDirForProvidedFormats(
                    TempDirUtils.newFolder(temporaryFolder),
                    FileInputFormat.getSupportedCompressionFormats());
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath(tempFile);
    format.configure(new Configuration());
    FileInputSplit[] splits = format.createInputSplits(2);
    final Set<String> supportedCompressionFormats =
            FileInputFormat.getSupportedCompressionFormats();
    assertThat(splits).hasSameSizeAs(supportedCompressionFormats);
    for (FileInputSplit split : splits) {
        // unsplittable compressed files report READ_WHOLE_SPLIT_FLAG as their length,
        // which is the flag for "read whole file"
        assertThat(split.getLength()).isEqualTo(FileInputFormat.READ_WHOLE_SPLIT_FLAG);
        assertThat(split.getStart()).isZero(); // always read from the beginning.
    }
    // test if this also works for "mixed" directories
    TestFileUtils.createTempFileInDirectory(
            tempFile.replace("file:", ""),
            "this creates a test file with a random extension (at least not .deflate)");
    final DummyFileInputFormat formatMixed = new DummyFileInputFormat();
    formatMixed.setFilePath(tempFile);
    formatMixed.configure(new Configuration());
    FileInputSplit[] splitsMixed = formatMixed.createInputSplits(2);
    assertThat(splitsMixed).hasSize(supportedCompressionFormats.size() + 1);
    for (FileInputSplit split : splitsMixed) {
        final String extension =
                FileInputFormat.extractFileExtension(split.getPath().getName());
        if (supportedCompressionFormats.contains(extension)) {
            // compressed file: whole-file split starting at the beginning
            assertThat(split.getLength()).isEqualTo(FileInputFormat.READ_WHOLE_SPLIT_FLAG);
            assertThat(split.getStart()).isZero(); // always read from the beginning.
        } else {
            assertThat(split.getStart()).isZero();
            // Fix: assert on the length itself with isPositive() instead of wrapping a
            // boolean comparison in assertThat(...).isTrue(), which hides the actual
            // value from the failure message.
            assertThat(split.getLength()).as("split size not correct").isPositive();
        }
    }
}
/**
 * Some FileInputFormats don't use FileInputFormat#createSplits (that would detect that the file
 * is non-splittable and deal with reading boundaries correctly), they all create splits
 * manually from FileSourceSplit. If input files are compressed, ensure that the size of the
 * split is not the compressed file size and that the compression decorator is called.
 */
@Test
void testFileInputFormatWithCompressionFromFileSource() throws IOException {
String tempFile =
TestFileUtils.createTempFileDirForProvidedFormats(
TempDirUtils.newFolder(temporaryFolder),
FileInputFormat.getSupportedCompressionFormats());
DummyFileInputFormat format = new DummyFileInputFormat();
format.setFilePath(tempFile);
format.configure(new Configuration());
// manually create a FileInputSplit per file as FileSource would do
// see org.apache.flink.connector.file.table.DeserializationSchemaAdapter.Reader()
List<FileInputSplit> splits = manuallyCreateSplits(tempFile);
final Set<String> supportedCompressionFormats =
FileInputFormat.getSupportedCompressionFormats();
// one file per compression format, one split per file
assertThat(splits).hasSameSizeAs(supportedCompressionFormats);
for (FileInputSplit split : splits) {
assertThat(split.getStart()).isZero(); // always read from the beginning.
format.open(split);
// open() must have routed the stream through the compression decorator
assertThat(format.compressedRead).isTrue();
assertThat(format.getSplitLength())
.isEqualTo(
FileInputFormat.READ_WHOLE_SPLIT_FLAG); // unsplittable compressed files
// have this size
// as flag for "read whole file"
}
}
/**
 * Simulates the splits produced by org.apache.flink.connector.file.src.FileSource: one
 * split per file, starting at offset 0 and spanning the file's full length. For a
 * compressed file the input format overrides this when it detects in {@link
 * FileInputFormat#open(FileInputSplit)} that the file is unsplittable.
 */
private List<FileInputSplit> manuallyCreateSplits(String pathString) throws IOException {
    final Path directory = new Path(pathString);
    final FileStatus[] files = directory.getFileSystem().listStatus(directory);
    final List<FileInputSplit> splits = new ArrayList<>(files.length);
    for (FileStatus file : files) {
        // split created like in DeserializationSchemaAdapter.Reader()
        splits.add(new FileInputSplit(0, file.getPath(), 0, file.getLen(), null));
    }
    return splits;
}
// ------------------------------------------------------------------------
// Ignored Files
// ------------------------------------------------------------------------
@Test
void testIgnoredUnderscoreFiles() throws IOException {
    final String contents = "CONTENTS";
    // create some accepted, some ignored files
    File child1 = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt");
    File child2 = TempDirUtils.newFile(temporaryFolder, "another_file.bin");
    File luigiFile = TempDirUtils.newFile(temporaryFolder, "_luigi");
    File success = TempDirUtils.newFile(temporaryFolder, "_SUCCESS");
    createTempFiles(
            contents.getBytes(ConfigConstants.DEFAULT_CHARSET),
            child1,
            child2,
            luigiFile,
            success);
    // test that only the valid files are accepted (underscore-prefixed ones are ignored)
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath(temporaryFolder.toFile().getPath());
    format.configure(new Configuration());
    FileInputSplit[] splits = format.createInputSplits(1);
    assertThat(splits).hasSize(2);
    // Fix: use containsExactlyInAnyOrder instead of the hand-rolled boolean OR, so a
    // failure reports the actual and expected URIs instead of just "expected true".
    assertThat(new URI[] {splits[0].getPath().toUri(), splits[1].getPath().toUri()})
            .containsExactlyInAnyOrder(child1.toURI(), child2.toURI());
}
@Test
void testExcludeFiles() throws IOException {
    final String contents = "CONTENTS";
    // one file that stays, one that the glob filter excludes
    File accepted = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt");
    File excluded = TempDirUtils.newFile(temporaryFolder, "another_file.bin");
    createTempFiles(contents.getBytes(ConfigConstants.DEFAULT_CHARSET), accepted, excluded);
    // only the non-excluded file may produce a split
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath(temporaryFolder.toFile().getPath());
    format.configure(new Configuration());
    format.setFilesFilter(
            new GlobFilePathFilter(
                    Collections.singletonList("**"),
                    Collections.singletonList("**/another_file.bin")));
    FileInputSplit[] splits = format.createInputSplits(1);
    assertThat(splits).hasSize(1);
    assertThat(accepted.toURI()).isEqualTo(splits[0].getPath().toUri());
}
@Test
void testReadMultiplePatterns() throws Exception {
    final String contents = "CONTENTS";
    // create some accepted, some ignored files
    File child1 = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt");
    File child2 = TempDirUtils.newFile(temporaryFolder, "another_file.bin");
    createTempFiles(contents.getBytes(ConfigConstants.DEFAULT_CHARSET), child1, child2);
    // both files match an exclude pattern, so no split may be created
    Configuration configuration = new Configuration();
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath(temporaryFolder.toFile().getPath());
    format.configure(configuration);
    format.setFilesFilter(
            new GlobFilePathFilter(
                    Collections.singletonList("**"),
                    Arrays.asList("**/another_file.bin", "**/dataFile1.txt")));
    FileInputSplit[] splits = format.createInputSplits(1);
    // assert on the array itself, consistent with the other tests in this
    // file and yielding a descriptive AssertJ failure message (the original
    // asserted on the bare int length)
    assertThat(splits).isEmpty();
}
@Test
void testGetStatsIgnoredUnderscoreFiles() throws IOException {
    final int fileSize = 2048;
    final long expectedTotal = 2L * fileSize;
    // two accepted data files plus two underscore files that must be ignored
    File child1 = TempDirUtils.newFile(temporaryFolder, "dataFile1.txt");
    File child2 = TempDirUtils.newFile(temporaryFolder, "another_file.bin");
    File luigiFile = TempDirUtils.newFile(temporaryFolder, "_luigi");
    File success = TempDirUtils.newFile(temporaryFolder, "_SUCCESS");
    createTempFiles(new byte[fileSize], child1, child2, luigiFile, success);
    final DummyFileInputFormat format = new DummyFileInputFormat();
    format.setFilePath(temporaryFolder.toFile().getPath());
    format.configure(new Configuration());
    // only the two visible files may contribute to the total input size
    BaseStatistics stats = format.getStatistics(null);
    assertThat(stats.getTotalInputSize()).isEqualTo(expectedTotal);
}
// ------------------------------------------------------------------------
// Stream Decoration
// ------------------------------------------------------------------------
@Test
void testDecorateInputStream() throws IOException {
    // create temporary file with 3 blocks
    final File tempFile = File.createTempFile("input-stream-decoration-test", "tmp");
    tempFile.deleteOnExit();
    final int blockSize = 8;
    final int numBlocks = 3;
    // try-with-resources: the original leaked the stream when a write threw
    try (FileOutputStream fileOutputStream = new FileOutputStream(tempFile)) {
        for (int i = 0; i < blockSize * numBlocks; i++) {
            fileOutputStream.write(new byte[] {(byte) i});
        }
    }
    final Configuration config = new Configuration();
    final FileInputFormat<byte[]> inputFormat = new MyDecoratedInputFormat();
    inputFormat.setFilePath(tempFile.toURI().toString());
    inputFormat.configure(config);
    inputFormat.openInputFormat();
    FileInputSplit[] inputSplits = inputFormat.createInputSplits(3);
    byte[] bytes = null;
    byte prev = 0;
    for (FileInputSplit inputSplit : inputSplits) {
        inputFormat.open(inputSplit);
        while (!inputFormat.reachedEnd()) {
            if ((bytes = inputFormat.nextRecord(bytes)) != null) {
                // the decorated stream inverts each byte: written 0,1,2,...
                // reads back as 255,254,253,... which is (byte) --prev
                assertThat(bytes).isEqualTo(new byte[] {--prev});
            }
        }
    }
    inputFormat.closeInputFormat();
}
// ------------------------------------------------------------------------
/** Writes {@code contents} into each of the given files and marks them delete-on-exit. */
private void createTempFiles(byte[] contents, File... files) throws IOException {
    for (File file : files) {
        file.deleteOnExit();
        // Files.write creates/truncates and closes the file, like the
        // buffered-stream variant it replaces
        Files.write(file.toPath(), contents);
    }
}
/**
 * Minimal {@link FileInputFormat} used by these tests. It never produces
 * records; it only records whether {@link #decorateInputStream} detected a
 * compressed (inflater-backed) file for the opened split.
 */
private static class DummyFileInputFormat extends FileInputFormat<IntValue> {
    private static final long serialVersionUID = 1L;
    // set by decorateInputStream(): true when the split's file extension
    // maps to a registered inflater input stream factory
    private boolean compressedRead = false;

    /** Always reports end-of-input, so no records are ever read. */
    @Override
    public boolean reachedEnd() {
        return true;
    }

    @Override
    public IntValue nextRecord(IntValue record) {
        return null;
    }

    @Override
    public void open(FileInputSplit split) throws IOException {
        // reset before super.open() invokes decorateInputStream()
        compressedRead = false;
        super.open(split);
    }

    @Override
    protected FSDataInputStream decorateInputStream(
            FSDataInputStream inputStream, FileInputSplit fileSplit) {
        // a non-null factory means the file extension is a known
        // compression format
        compressedRead =
                getInflaterInputStreamFactory(
                                extractFileExtension(fileSplit.getPath().getName()))
                        != null;
        return inputStream;
    }
}
// NOTE(review): non-static inner class carrying a serialVersionUID — serializing
// it would drag along the enclosing (non-serializable) test instance. Possibly
// intentional for a test elsewhere in this file; confirm before making it static.
private class MultiDummyFileInputFormat extends DummyFileInputFormat {
    private static final long serialVersionUID = 1L;
}
/**
 * Format whose decorated stream inverts every byte; used to verify that
 * {@link FileInputFormat#decorateInputStream} is actually applied to the
 * stream the format reads from.
 */
private static final class MyDecoratedInputFormat extends FileInputFormat<byte[]> {
    private static final long serialVersionUID = 1L;

    @Override
    public boolean reachedEnd() throws IOException {
        // done once the stream position passed the end of this split
        return this.stream.getPos() >= this.splitStart + this.splitLength;
    }

    @Override
    public byte[] nextRecord(byte[] reuse) throws IOException {
        int read = this.stream.read();
        // reachedEnd() guards against EOF, so -1 here is a broken invariant
        if (read == -1) throw new IllegalStateException();
        return new byte[] {(byte) read};
    }

    @Override
    protected FSDataInputStream decorateInputStream(
            FSDataInputStream inputStream, FileInputSplit fileSplit) throws Throwable {
        // keep whatever decoration the superclass applies, then invert bytes
        inputStream = super.decorateInputStream(inputStream, fileSplit);
        return new InputStreamFSInputWrapper(new InvertedInputStream(inputStream));
    }
}
/** Wraps a stream and flips every bit of each delivered byte; EOF (-1) passes through. */
private static final class InvertedInputStream extends InputStream {

    private final InputStream delegate;

    private InvertedInputStream(InputStream originalStream) {
        this.delegate = originalStream;
    }

    @Override
    public int read() throws IOException {
        final int b = delegate.read();
        if (b == -1) {
            return -1;
        }
        // bitwise complement of the low byte only
        return ~b & 0xFF;
    }

    @Override
    public int available() throws IOException {
        return delegate.available();
    }
}
}
// ==== apache/jackrabbit-oak — oak-store-document/src/test/java/org/apache/jackrabbit/oak/plugins/document/ClusterNodeInfoTest.java ====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.plugins.document;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.jackrabbit.oak.plugins.document.memory.MemoryDocumentStore;
import org.apache.jackrabbit.oak.plugins.document.spi.lease.LeaseFailureHandler;
import org.apache.jackrabbit.oak.stats.Clock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.lessThan;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(Parameterized.class)
public class ClusterNodeInfoTest {
// virtual clock driving all lease timing in these tests
private Clock clock;
// document store wrapper with fault injection (delays / injected failures)
private TestStore store;
private FailureHandler handler = new FailureHandler();
// parameterized: whether the cluster node joins as invisible
private boolean invisible;
// delay before a recovered clusterId may be reused; individual tests override it
private long reuseAfterRecoverMillis = ClusterNodeInfo.DEFAULT_REUSE_DELAY_AFTER_RECOVERY_MILLIS;

public ClusterNodeInfoTest(boolean invisible) {
    this.invisible = invisible;
}
@Parameterized.Parameters(name="{index}: ({0})")
public static List<Boolean> fixtures() {
    // run every test once with a visible and once with an invisible cluster node
    return List.of(Boolean.FALSE, Boolean.TRUE);
}
@Before
public void before() throws Exception {
    // start the virtual clock at the current wall-clock time and make
    // ClusterNodeInfo use it for all lease computations
    clock = new Clock.Virtual();
    clock.waitUntil(System.currentTimeMillis());
    ClusterNodeInfo.setClock(clock);
    store = new TestStore();
}

@After
public void after() {
    // undo the static test configuration so other tests are unaffected
    ClusterNodeInfo.resetClockToDefault();
    ClusterNodeInfo.resetRecoveryDelayMillisToDefault();
}
@Test
public void renewLease() throws Exception {
    ClusterNodeInfo nodeInfo = newClusterNodeInfo(1);
    final long previousLeaseEnd = nodeInfo.getLeaseEndTime();
    waitLeaseUpdateInterval();
    // a renewal after the update interval must succeed and push the lease end out
    assertTrue(nodeInfo.renewLease());
    assertTrue(nodeInfo.getLeaseEndTime() > previousLeaseEnd);
    assertFalse(handler.isLeaseFailure());
}
@Test
public void renewLeaseExceptionBefore() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    waitLeaseUpdateInterval();
    // fail the next update before it reaches the store
    store.setFailBeforeUpdate(1);
    try {
        info.renewLease();
        fail("must fail with DocumentStoreException");
    } catch (DocumentStoreException e) {
        // expected
    }
    // the injected failure must have been consumed
    assertEquals(0, store.getFailBeforeUpdate());
    long leaseEnd = info.getLeaseEndTime();
    // must succeed next time
    waitLeaseUpdateInterval();
    assertTrue(info.renewLease());
    assertTrue(info.getLeaseEndTime() > leaseEnd);
    assertFalse(handler.isLeaseFailure());
}

// OAK-4770
@Test
public void renewLeaseExceptionAfter() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    waitLeaseUpdateInterval();
    // fail the next update after the store applied it
    store.setFailAfterUpdate(1);
    try {
        info.renewLease();
        fail("must fail with DocumentStoreException");
    } catch (DocumentStoreException e) {
        // expected
    }
    // the injected failure must have been consumed
    assertEquals(0, store.getFailAfterUpdate());
    long leaseEnd = info.getLeaseEndTime();
    // must succeed next time
    waitLeaseUpdateInterval();
    assertTrue(info.renewLease());
    assertTrue(info.getLeaseEndTime() > leaseEnd);
    assertFalse(handler.isLeaseFailure());
}

@Test
public void renewLeaseExceptionBeforeWithDelay() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    waitLeaseUpdateInterval();
    store.setFailBeforeUpdate(1);
    // delay operations by half the lease time, this will
    // first delay the update and then delay the subsequent
    // find because of the exception on update. afterwards the
    // lease must be expired
    store.setDelayMillis(info.getLeaseTime() / 2);
    try {
        info.renewLease();
        fail("must throw DocumentStoreException");
    } catch (DocumentStoreException e) {
        // expected
    }
    // failure happened before the update, so no new lease end was persisted
    assertTrue(info.getLeaseEndTime() < clock.getTime());
}

@Test
public void renewLeaseExceptionAfterWithDelay() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    long leaseEnd = info.getLeaseEndTime();
    waitLeaseUpdateInterval();
    store.setFailAfterUpdate(1);
    // delay operations by half the lease time, this will
    // first delay the update and then delay the subsequent
    // find because of the exception on update. afterwards
    // the leaseEnd must reflect the updated value
    store.setDelayMillis(info.getLeaseTime() / 2);
    try {
        info.renewLease();
        fail("must throw DocumentStoreException");
    } catch (DocumentStoreException e) {
        // expected
    }
    // the update went through before the injected failure, so the
    // lease end read back from the store is the renewed one
    assertTrue(info.getLeaseEndTime() > leaseEnd);
}

@Test
public void renewLeaseExceptionAfterFindFails() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    long leaseEnd = info.getLeaseEndTime();
    waitLeaseUpdateInterval();
    store.setFailAfterUpdate(1);
    store.setFailFind(1);
    // delay operations by half the lease time, this will
    // first delay the update and then delay and fail the
    // subsequent find once.
    store.setDelayMillis(info.getLeaseTime() / 2);
    try {
        info.renewLease();
        fail("must throw DocumentStoreException");
    } catch (DocumentStoreException e) {
        // expected
    }
    assertEquals(0, store.getFailFind());
    // must not reflect the updated value, because retries
    // to read the current cluster node info document stops
    // once lease expires
    assertEquals(leaseEnd, info.getLeaseEndTime());
}

@Test
public void renewLeaseExceptionAfterFindSucceedsEventually() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    waitLeaseUpdateInterval();
    // delay operations by a sixth of the lease time, this will
    // first delay the update and then delay and fail the
    // subsequent find calls. find retries should eventually
    // succeed within the lease time
    store.setDelayMillis(info.getLeaseTime() / 6);
    store.setFailAfterUpdate(1);
    store.setFailFind(3);
    try {
        info.renewLease();
        fail("must throw DocumentStoreException");
    } catch (DocumentStoreException e) {
        // expected
    }
    // the three retries must eventually succeed within the lease time
    assertEquals(0, store.getFailFind());
    // must reflect the updated value
    assertTrue(info.getLeaseEndTime() > clock.getTime());
}
@Test
public void renewLeaseDelayed() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    // jump past the lease end, then let another node recover clusterId 1
    clock.waitUntil(info.getLeaseEndTime() + ClusterNodeInfo.DEFAULT_LEASE_UPDATE_INTERVAL_MILLIS);
    recoverClusterNode(1);
    // renewing after recovery by someone else must fail ...
    try {
        info.renewLease();
        fail("must fail with DocumentStoreException");
    } catch (DocumentStoreException e) {
        // expected
    }
    // ... and the failure handler must be notified (possibly asynchronously)
    assertLeaseFailure();
}

// OAK-4779
@Test
public void renewLeaseWhileRecoveryRunning() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    // wait until after lease end
    clock.waitUntil(info.getLeaseEndTime() + ClusterNodeInfo.DEFAULT_LEASE_UPDATE_INTERVAL_MILLIS);
    // simulate a started recovery
    MissingLastRevSeeker seeker = new MissingLastRevSeeker(store.getStore(), clock);
    assertTrue(seeker.acquireRecoveryLock(1, 42));
    // cluster node 1 must not be able to renew the lease now
    try {
        // must either return false
        assertFalse(info.renewLease());
    } catch (DocumentStoreException e) {
        // or throw an exception
    }
}

@Test
public void renewLeaseTimedOutWithCheck() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    // wait until after lease end
    clock.waitUntil(info.getLeaseEndTime() + ClusterNodeInfo.DEFAULT_LEASE_UPDATE_INTERVAL_MILLIS);
    // an explicit lease check on an expired lease must throw
    try {
        info.performLeaseCheck();
        fail("lease check must fail with exception");
    } catch (DocumentStoreException e) {
        // expected
    }
    // cluster node 1 must not be able to renew the lease now
    try {
        // must either return false
        assertFalse(info.renewLease());
    } catch (DocumentStoreException e) {
        // or throw an exception
    }
}
// OAK-9564
@Test
public void renewLeaseSameRuntimeId() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    String runtimeId = info.getRuntimeId();
    long leaseEnd = info.getLeaseEndTime();
    waitLeaseUpdateInterval();
    assertTrue(info.renewLease());
    assertTrue(info.getLeaseEndTime() > leaseEnd);
    // The Runtime UUID should remain the same
    assertEquals(info.getRuntimeId(), runtimeId);
    assertFalse(handler.isLeaseFailure());
}

// OAK-9564
@Test
public void renewLeaseDifferentRuntimeId() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    waitLeaseUpdateInterval();
    long leaseEndTimeBeforeRenew = info.getLeaseEndTime();
    // Modify the UUID to mock it belongs to a different node
    UpdateOp update = new UpdateOp("1", false);
    update.set(ClusterNodeInfo.RUNTIME_ID_KEY, "different-uuid");
    store.findAndUpdate(Collection.CLUSTER_NODES, update);
    // renewing with a foreign runtime id on the document must be refused
    try {
        info.renewLease();
        fail("Should not update lease anymore");
    } catch(DocumentStoreException e) {
        // expected
    }
    // Lease end time shouldn't be different
    assertEquals(leaseEndTimeBeforeRenew, info.getLeaseEndTime());
}
// OAK-9564
@Test
public void renewLeaseTakingLongerThanTimeout() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    waitLeaseUpdateInterval();
    final long leaseEndTimeBeforeRenew = info.getLeaseEndTime();
    final String runtimeId = info.getRuntimeId();
    Map<String, Long> unexpectedLeaseEnd = new HashMap<>();
    long unexpectedLeaseEndTime = info.getLeaseEndTime() + 133333;
    unexpectedLeaseEnd.put(ClusterNodeInfo.LEASE_END_KEY, unexpectedLeaseEndTime);
    // The update will fail after 30 seconds. Simulating a Mongo timeout.
    store.setFailAfterUpdate(1);
    store.setDelayMillisOnce(30000);
    store.setDelayMillis(10000);
    store.setFindShouldAlterReturnDocument(true);
    // However, the following find after the update will return an
    // unexpected lease time (but still within a valid time).
    // This unexpected update could come from a previous but very slow update
    // executed in Mongo. So it's still a valid one, but not the new one
    // that is expected.
    store.setMapAlterReturnDocument(unexpectedLeaseEnd);
    // However, the current behaviour is that as the lease end time doesn't
    // match the expected one, the lease will fail and the nodeStore becomes
    // unusable.
    try {
        info.renewLease();
    } catch(DocumentStoreException e) {
        // expected
    }
    // The new leaseEndTime coming from Mongo is not reflected in the
    // ClusterNodeInfo. Meaning it will eventually be treated as 'expired'
    // by the DocumentNodeStore, even when in Mongo it was set.
    assertThat(leaseEndTimeBeforeRenew, lessThan(info.getLeaseEndTime()));
    assertEquals(unexpectedLeaseEndTime, info.getLeaseEndTime());
    // Runtime ID is the same
    assertEquals(runtimeId, info.getRuntimeId());
}

// OAK-9564: This is a someway artificial test. The idea behind is to try to reproduce
// a case where a renewLease fails because of a timeout. Then the following renewLease
// occurs faster, but during that time the previous update is executed in Mongo.
// That 'older' update shouldn't go through now, reducing the effective lease end time.
@Test
public void renewLeaseShouldNotGoBackInTime() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    waitLeaseUpdateInterval();
    long newerLeaseEndTime = clock.getTime() + ClusterNodeInfo.DEFAULT_LEASE_DURATION_MILLIS +
            ClusterNodeInfo.DEFAULT_LEASE_UPDATE_INTERVAL_MILLIS;
    // simulate a newer renew lease took place
    UpdateOp update = new UpdateOp("1", false);
    update.set(ClusterNodeInfo.LEASE_END_KEY, newerLeaseEndTime);
    store.findAndUpdate(Collection.CLUSTER_NODES, update);
    // now another renew happens, which will try to set a lesser lease end
    info.renewLease();
    ClusterNodeInfoDocument info2 = store.find(Collection.CLUSTER_NODES, "1");
    assertNotNull(info2);
    // the lease end time should remain the same
    assertEquals(newerLeaseEndTime, info2.getLeaseEndTime());
}
// OAK-9564
@Test
public void canGetDisposedClusterWithDifferentRuntimeId() {
    ClusterNodeInfo info = newClusterNodeInfo(0);
    int id = info.getId();
    assertEquals(1, id);
    // shut it down
    info.dispose();
    // edit the runtime ID
    UpdateOp op = new UpdateOp(String.valueOf(id), false);
    op.set(ClusterNodeInfo.RUNTIME_ID_KEY, "some-different-uuid");
    assertNotNull(store.findAndUpdate(Collection.CLUSTER_NODES, op));
    try {
        info = newClusterNodeInfo(id);
        assertEquals(info.getId(), id);
    } catch(DocumentStoreException e) {
        // should be able to acquire it again, because it was properly disposed
        fail("Must be able to acquire the cluster again after disposal");
    }
}

// OAK-9564
@Test
public void canGetRecoveredClusterWithDifferentRuntimeId() {
    ClusterNodeInfo info = newClusterNodeInfo(0);
    int id = info.getId();
    assertEquals(1, id);
    // shut it down
    info.dispose();
    // edit the data artificially to reproduce the bug where a cluster can't be acquired
    // after it was recovered by a different node
    UpdateOp op = new UpdateOp(String.valueOf(id), false);
    op.set(ClusterNodeInfo.RUNTIME_ID_KEY, "some-different-uuid");
    op.set(ClusterNodeInfo.REV_RECOVERY_BY, "");
    op.set(ClusterNodeInfo.REV_RECOVERY_LOCK, "NONE");
    op.set(ClusterNodeInfo.STATE, null);
    op.set(ClusterNodeInfo.LEASE_END_KEY, null);
    op.set(ClusterNodeInfo.RECOVERY_TIME_KEY, null);
    assertNotNull(store.findAndUpdate(Collection.CLUSTER_NODES, op));
    // should be able to acquire it
    try {
        info = newClusterNodeInfo(id);
        assertEquals(info.getId(), id);
    } catch(DocumentStoreException e) {
        fail("Must be able to acquire the cluster");
    }
}

// OAK-9564
@Test
public void cannotGetActiveClusterWithDifferentRuntimeIdUntilExpires() {
    ClusterNodeInfo info = newClusterNodeInfo(0);
    int id = info.getId();
    assertEquals(1, id);
    // edit the runtime ID while the entry is still active
    UpdateOp op = new UpdateOp(String.valueOf(id), false);
    op.set(ClusterNodeInfo.RUNTIME_ID_KEY, "some-different-uuid");
    assertNotNull(store.findAndUpdate(Collection.CLUSTER_NODES, op));
    // should be able to acquire it, but it should wait until the lease expire
    ClusterNodeInfo infoNew = newClusterNodeInfo(id);
    assertEquals(infoNew.getId(), id);
    assertTrue(infoNew.getLeaseEndTime() > info.getLeaseEndTime());
    assertNotEquals(infoNew.getRuntimeId(), info.getRuntimeId());
    // the superseded instance must now fail its lease check ...
    try {
        info.performLeaseCheck();
        fail("Must fail here, and not get cluster node info");
    } catch(DocumentStoreException e) {
        // expected exception
        assertTrue(e.getMessage().startsWith("This oak instance failed to update the lease in"));
    }
    // ... while the new owner's check passes
    infoNew.performLeaseCheck();
}
@Test
public void readOnlyClusterNodeInfo() {
    ClusterNodeInfo readOnly = ClusterNodeInfo.getReadOnlyInstance(store);
    // a read-only instance uses the reserved id 0, never expires and cannot renew
    assertEquals(0, readOnly.getId());
    assertEquals(Long.MAX_VALUE, readOnly.getLeaseEndTime());
    assertFalse(readOnly.renewLease());
}
@Test
public void ignoreEntryWithInvalidID() {
    String instanceId = "node1";
    ClusterNodeInfo info = newClusterNodeInfo(0, instanceId);
    assertEquals(1, info.getId());
    assertEquals(instanceId, info.getInstanceId());
    // shut it down
    info.dispose();
    // sneak in an entry whose id is not a number; acquisition must skip it
    store.create(Collection.CLUSTER_NODES,
            Collections.singletonList(new UpdateOp("invalid", true)));
    // acquire again: the valid entry 1 must be picked, not the bogus one
    info = newClusterNodeInfo(0, instanceId);
    assertEquals(1, info.getId());
    assertEquals(instanceId, info.getInstanceId());
    info.dispose();
}
@Test
public void acquireInactiveClusterId() {
    // first instance grabs clusterId 1, then shuts down cleanly
    ClusterNodeInfo first = newClusterNodeInfo(0, "node1");
    assertEquals(1, first.getId());
    assertEquals("node1", first.getInstanceId());
    first.dispose();
    // an instance starting from a different location must be able to take
    // over the now inactive clusterId 1
    ClusterNodeInfo second = newClusterNodeInfo(0, "node2");
    assertEquals(1, second.getId());
    assertEquals("node2", second.getInstanceId());
    second.dispose();
}
@Test
public void acquireInactiveClusterIdWithMatchingEnvironment() {
    String instanceId1 = "node1";
    String instanceId2 = "node2";
    // two concurrently running instances get distinct clusterIds
    ClusterNodeInfo info1 = newClusterNodeInfo(0, instanceId1);
    assertEquals(1, info1.getId());
    assertEquals(instanceId1, info1.getInstanceId());
    ClusterNodeInfo info2 = newClusterNodeInfo(0, instanceId2);
    assertEquals(2, info2.getId());
    assertEquals(instanceId2, info2.getInstanceId());
    info1.dispose();
    info2.dispose();
    // a restart of node2 must reacquire its previous clusterId 2
    info2 = newClusterNodeInfo(0, instanceId2);
    assertEquals(2, info2.getId());
    assertEquals(instanceId2, info2.getInstanceId());
}
@Test
public void acquireInactiveClusterIdConcurrently() throws Exception {
    ExecutorService executor = Executors.newCachedThreadPool();
    try {
        String instanceId1 = "node1";
        String instanceId2 = "node2";
        String instanceId3 = "node3";
        List<String> instanceIds = new ArrayList<>();
        Collections.addAll(instanceIds, instanceId1, instanceId2, instanceId3);
        // create a first clusterNode entry
        ClusterNodeInfo info1 = newClusterNodeInfo(0, instanceId1);
        assertEquals(1, info1.getId());
        assertEquals(instanceId1, info1.getInstanceId());
        // shut it down
        info1.dispose();
        // start multiple instances from different locations competing for
        // the same inactive clusterId
        List<Callable<ClusterNodeInfo>> tasks = new ArrayList<>();
        for (String id : instanceIds) {
            tasks.add(() -> newClusterNodeInfo(0, id));
        }
        Map<Integer, ClusterNodeInfo> clusterNodes = executor.invokeAll(tasks)
                .stream().map(f -> {
                    try {
                        return f.get();
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }).collect(Collectors.toMap(ClusterNodeInfo::getId, Function.identity()));
        // must have different clusterIds
        assertEquals(3, clusterNodes.size());
        assertThat(clusterNodes.keySet(), containsInAnyOrder(1, 2, 3));
        clusterNodes.values().forEach(ClusterNodeInfo::dispose);
    } finally {
        // the original leaked the pool when any assertion above failed
        executor.shutdown();
    }
}
@Test
public void acquireExpiredClusterId() throws Exception {
    String instanceId = "node1";
    ClusterNodeInfo info = newClusterNodeInfo(0, instanceId);
    assertEquals(1, info.getId());
    assertEquals(instanceId, info.getInstanceId());
    expireLease(info);
    // restart after a crash with an expired lease: the same instance must
    // reclaim its clusterId 1
    info = newClusterNodeInfo(0, instanceId);
    assertEquals(1, info.getId());
    assertEquals(instanceId, info.getInstanceId());
    info.dispose();
}
@Test
public void skipExpiredClusterIdWithDifferentInstanceId() throws Exception {
    // simulate multiple cluster nodes
    ClusterNodeInfo node1 = newClusterNodeInfo(0, "node1");
    assertEquals(1, node1.getId());
    assertEquals("node1", node1.getInstanceId());
    expireLease(node1);
    // a different instance must not grab the expired (unrecovered) id 1
    ClusterNodeInfo node2 = newClusterNodeInfo(0, "node2");
    assertEquals(2, node2.getId());
    assertEquals("node2", node2.getInstanceId());
    node2.dispose();
}
@Test
public void acquireExpiredClusterIdStatic() throws Exception {
    ClusterNodeInfo info1 = newClusterNodeInfo(0, "node1");
    assertEquals(1, info1.getId());
    assertEquals("node1", info1.getInstanceId());
    expireLease(info1);
    // explicitly requesting the expired id from another location must be
    // rejected until recovery ran for it
    try {
        newClusterNodeInfo(1, "node2");
        fail("Must fail with DocumentStoreException");
    } catch (DocumentStoreException e) {
        assertThat(e.getMessage(), containsString("needs recovery"));
    }
}
@Test
public void acquireExpiredClusterIdConcurrently() throws Exception {
    ExecutorService executor = Executors.newCachedThreadPool();
    try {
        String instanceId1 = "node1";
        String instanceId2 = "node2";
        String instanceId3 = "node3";
        List<String> instanceIds = new ArrayList<>();
        Collections.addAll(instanceIds, instanceId1, instanceId2, instanceId3);
        ClusterNodeInfo info1 = newClusterNodeInfo(0, instanceId1);
        assertEquals(1, info1.getId());
        assertEquals(instanceId1, info1.getInstanceId());
        expireLease(info1);
        // start multiple instances from different locations competing for
        // the same clusterId with expired lease
        List<Callable<ClusterNodeInfo>> tasks = new ArrayList<>();
        for (String id : instanceIds) {
            tasks.add(() -> newClusterNodeInfo(0, id));
        }
        Map<Integer, ClusterNodeInfo> clusterNodes = executor.invokeAll(tasks)
                .stream().map(f -> {
                    try {
                        return f.get();
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }).collect(Collectors.toMap(ClusterNodeInfo::getId, Function.identity()));
        // must have different clusterIds
        assertEquals(3, clusterNodes.size());
        assertThat(clusterNodes.keySet(), containsInAnyOrder(1, 2, 3));
        clusterNodes.values().forEach(ClusterNodeInfo::dispose);
    } finally {
        // the original leaked the pool when any assertion above failed
        executor.shutdown();
    }
}
@Test
public void skipClusterIdWithoutStartTime() {
    ClusterNodeInfo info = newClusterNodeInfo(0);
    int id = info.getId();
    assertEquals(1, id);
    // shut it down
    info.dispose();
    // strip the startTime field from the persisted entry
    UpdateOp removeStart = new UpdateOp(String.valueOf(id), false);
    removeStart.remove(ClusterNodeInfo.START_TIME_KEY);
    assertNotNull(store.findAndUpdate(Collection.CLUSTER_NODES, removeStart));
    // an entry without startTime must be skipped: a new id is assigned
    info = newClusterNodeInfo(0);
    assertNotEquals(1, info.getId());
}
@Test
public void defaultLeaseCheckMode() {
    // a freshly acquired instance uses the STRICT lease check mode
    ClusterNodeInfo info = newClusterNodeInfo(0);
    assertEquals(LeaseCheckMode.STRICT, info.getLeaseCheckMode());
}
@Test
public void strictLeaseCheckMode() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    // advance to the exact lease end
    clock.waitUntil(info.getLeaseEndTime());
    // lease renew must fail with exception
    try {
        info.renewLease();
        fail("must fail with DocumentStoreException");
    } catch (DocumentStoreException e) {
        assertThat(e.getMessage(), containsString("failed to update the lease"));
        assertThat(e.getMessage(), containsString("mode: STRICT"));
    }
    assertLeaseFailure();
}

@Test
public void lenientLeaseCheckMode() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    info.setLeaseCheckMode(LeaseCheckMode.LENIENT);
    // advance to the exact lease end
    clock.waitUntil(info.getLeaseEndTime());
    // must still be able to renew
    assertTrue(info.renewLease());
    assertFalse(handler.isLeaseFailure());
}
@Test
public void reuseAfterRecover() throws Exception {
    reuseAfterRecoverMillis = 60000;
    ClusterNodeInfo info = newClusterNodeInfo(1);
    assertEquals(1, info.getId());
    // wait until after lease end
    clock.waitUntil(info.getLeaseEndTime() + ClusterNodeInfo.DEFAULT_LEASE_UPDATE_INTERVAL_MILLIS);
    recoverClusterNode(1);
    // clusterId 1 was just recovered; explicit re-acquisition must be
    // refused while the reuse delay has not elapsed
    try {
        info = newClusterNodeInfo(1);
        fail("should fail");
    } catch(Exception e) {
        // should fail
    }
    // auto-assignment first reuses the inactive entry left behind by the
    // recovery node store (42) ...
    assertEquals(42, newClusterNodeInfo(0).getId());
    // ... and must then hand out a fresh id (2). The original asserted 1
    // here, which is internally inconsistent: if id 1 were reusable at this
    // point, the previous acquisition would already have returned 1 instead
    // of 42, and the explicit acquisition of 1 below (after the delay)
    // would find it active.
    assertEquals(2, newClusterNodeInfo(0).getId());
    assertEquals(3, newClusterNodeInfo(3).getId());
    // wait until after recover + reuseAfterRecoverMillis
    clock.waitUntil(info.getLeaseEndTime() + reuseAfterRecoverMillis);
    assertEquals(1, newClusterNodeInfo(1).getId());
}
@Test
public void recoveryNeededNoDelay() throws Exception {
    ClusterNodeInfo info = newClusterNodeInfo(1);
    String key = String.valueOf(info.getId());
    ClusterNodeInfoDocument doc = store.find(Collection.CLUSTER_NODES, key);
    // while the lease is valid no recovery is needed
    assertFalse(doc.isRecoveryNeeded(clock.getTime()));
    // without a recovery delay, recovery is needed right after lease end
    clock.waitUntil(info.getLeaseEndTime() + 1);
    assertTrue(doc.isRecoveryNeeded(clock.getTime()));
}

@Test
public void recoveryNeededWithDelay() throws Exception {
    ClusterNodeInfo.setRecoveryDelayMillis(60000);
    ClusterNodeInfo info = newClusterNodeInfo(1);
    String key = String.valueOf(info.getId());
    ClusterNodeInfoDocument doc = store.find(Collection.CLUSTER_NODES, key);
    assertFalse(doc.isRecoveryNeeded(clock.getTime()));
    // one millisecond before the configured delay elapsed: still not needed
    clock.waitUntil(info.getLeaseEndTime() + 59999);
    assertFalse(doc.isRecoveryNeeded(clock.getTime()));
    // once the delay has fully elapsed, recovery is needed. The original
    // waited twice for "leaseEnd + 1" here — a no-op on the virtual clock
    // (that time had already passed) — and asserted both false and true at
    // the same instant; the intended boundary is the 60000 ms delay.
    clock.waitUntil(info.getLeaseEndTime() + 60001);
    assertTrue(doc.isRecoveryNeeded(clock.getTime()));
}
/** Polls the failure handler for up to ~1 second; fails the test if no lease failure arrives. */
private void assertLeaseFailure() throws Exception {
    int attempts = 100;
    while (attempts-- > 0) {
        if (handler.isLeaseFailure()) {
            return;
        }
        Thread.sleep(10);
    }
    fail("expected lease failure");
}
/**
 * Advances the virtual clock past the lease end of {@code info} and asserts
 * that its cluster node document now reports that recovery is needed.
 * (The original also created a {@code MissingLastRevSeeker} that was never
 * used; the unused local has been removed.)
 */
private void expireLease(ClusterNodeInfo info)
        throws InterruptedException {
    // let lease expire
    clock.waitUntil(info.getLeaseEndTime() +
            ClusterNodeInfo.DEFAULT_LEASE_UPDATE_INTERVAL_MILLIS);
    // check if expired -> recovery is needed
    String key = String.valueOf(info.getId());
    ClusterNodeInfoDocument infoDoc = store.find(Collection.CLUSTER_NODES, key);
    assertNotNull(infoDoc);
    assertTrue(infoDoc.isRecoveryNeeded(clock.getTime()));
}
/**
 * Runs last-rev recovery for the given cluster node. The recovery is
 * performed by a separate DocumentNodeStore (clusterId 42) working on the
 * unwrapped store, mimicking another cluster member doing the recovery.
 */
private void recoverClusterNode(int clusterId) throws Exception {
    DocumentNodeStore nodeStore = new DocumentMK.Builder()
            .setDocumentStore(store.getStore()) // use unwrapped store
            .setAsyncDelay(0).setClusterId(42).clock(clock).getNodeStore();
    try {
        LastRevRecoveryAgent agent =
                new LastRevRecoveryAgent(nodeStore.getDocumentStore(), nodeStore);
        agent.recover(clusterId);
    } finally {
        // always release the helper node store, even if recovery throws
        nodeStore.dispose();
    }
}
/** Advances the virtual clock by one lease update interval plus 1 ms. */
private void waitLeaseUpdateInterval() throws Exception {
    long target =
            clock.getTime() + ClusterNodeInfo.DEFAULT_LEASE_UPDATE_INTERVAL_MILLIS + 1;
    clock.waitUntil(target);
}
/**
 * Acquires a ClusterNodeInfo for the given cluster id / instance id from
 * the (fault-injecting) test store and attaches the test's lease failure
 * handler so lease failures can be observed by assertLeaseFailure().
 */
private ClusterNodeInfo newClusterNodeInfo(int clusterId,
        String instanceId) {
    ClusterNodeInfo clusterNode = ClusterNodeInfo.getInstance(store,
            new SimpleRecoveryHandler(store, clock), null, instanceId, clusterId, invisible,
            reuseAfterRecoverMillis);
    clusterNode.setLeaseFailureHandler(handler);
    return clusterNode;
}
// Convenience overload: acquire a cluster node info without an instance id.
private ClusterNodeInfo newClusterNodeInfo(int clusterId) {
    return newClusterNodeInfo(clusterId, null);
}
/**
 * Test lease failure handler that simply records whether a lease failure
 * was ever signalled. Thread-safe: the flag is an AtomicBoolean because
 * the failure may be reported from a background lease-update thread.
 */
static final class FailureHandler implements LeaseFailureHandler {

    private final AtomicBoolean failed = new AtomicBoolean();

    @Override
    public void handleLeaseFailure() {
        failed.set(true);
    }

    /** @return {@code true} once a lease failure has been reported */
    public boolean isLeaseFailure() {
        return failed.get();
    }
}
/**
 * Fault-injecting document store used by the tests: wraps a
 * MemoryDocumentStore and can (a) delay calls via the outer test's virtual
 * clock, (b) throw DocumentStoreException before/after updates or on find,
 * and (c) substitute the returned document with a synthetic
 * ClusterNodeInfoDocument built from {@code mapAlterReturnDocument}.
 */
final class TestStore extends DocumentStoreWrapper {

    // when set, find() returns a synthetic document built from mapAlterReturnDocument
    private final AtomicBoolean findShouldAlterReturnDocument = new AtomicBoolean();
    // when set, findAndUpdate() returns a synthetic document instead of the real result
    private final AtomicBoolean findAndUpdateShouldAlterReturnDocument = new AtomicBoolean();
    // raw Map kept for compatibility with existing callers; its entries are
    // copied verbatim into the synthetic document's data map
    private Map mapAlterReturnDocument;
    // countdown of induced failures: each positive value throws once and decrements
    private final AtomicInteger failBeforeUpdate = new AtomicInteger();
    private final AtomicInteger failAfterUpdate = new AtomicInteger();
    private final AtomicInteger failFind = new AtomicInteger();
    // artificial latency (virtual-clock ms) applied on every call / on the next call only
    private long delayMillis;
    private long delayMillisOnce;

    TestStore() {
        super(new MemoryDocumentStore());
    }

    // the wrapped store without any fault injection
    DocumentStore getStore() {
        return store;
    }

    /**
     * Delegates to the wrapped store, optionally delaying, failing before or
     * after the update, or replacing the returned document entirely.
     */
    @Override
    public <T extends Document> T findAndUpdate(Collection<T> collection,
            UpdateOp update) {
        maybeDelay();
        maybeDelayOnce();
        maybeThrow(failBeforeUpdate, "update failed before");
        T doc = super.findAndUpdate(collection, update);
        maybeThrow(failAfterUpdate, "update failed after");
        if (getFindAndUpdateShouldAlterReturnDocument()) {
            // build a fresh document from scratch (the real result is discarded)
            ClusterNodeInfoDocument cdoc = new ClusterNodeInfoDocument();
            cdoc.data.putAll(getMapAlterReturnDocument());
            cdoc.seal();
            // NOTE(review): unchecked cast — only valid when the collection is
            // CLUSTER_NODES; confirm callers never enable alteration elsewhere
            return (T)cdoc;
        } else {
            return doc;
        }
    }

    /**
     * Delegates to the wrapped store, optionally delaying, failing, or
     * returning an altered deep copy of the found document.
     */
    @Override
    public <T extends Document> T find(Collection<T> collection,
            String key) {
        maybeDelay();
        maybeThrow(failFind, "find failed");
        T doc = super.find(collection, key);
        if (getFindShouldAlterReturnDocument()) {
            // unlike findAndUpdate(), this preserves the real document's data
            // and overlays the alteration map on top of a deep copy
            // NOTE(review): assumes doc is non-null while alteration is enabled
            ClusterNodeInfoDocument cdoc = new ClusterNodeInfoDocument();
            doc.deepCopy(cdoc);
            cdoc.data.putAll(getMapAlterReturnDocument());
            cdoc.seal();
            return (T)cdoc;
        } else {
            return doc;
        }
    }

    // advance the virtual clock by delayMillis on every intercepted call
    private void maybeDelay() {
        try {
            clock.waitUntil(clock.getTime() + delayMillis);
        } catch (InterruptedException e) {
            throw new DocumentStoreException(e);
        }
    }

    // one-shot delay: resets itself to 0 after firing once
    private void maybeDelayOnce() {
        try {
            clock.waitUntil(clock.getTime() + delayMillisOnce);
            delayMillisOnce = 0;
        } catch (InterruptedException e) {
            throw new DocumentStoreException(e);
        }
    }

    // throw while the counter is positive, consuming one failure per call
    private void maybeThrow(AtomicInteger num, String msg) {
        if (num.get() > 0) {
            num.decrementAndGet();
            throw new DocumentStoreException(msg);
        }
    }

    public Map getMapAlterReturnDocument() {
        return mapAlterReturnDocument;
    }

    public void setMapAlterReturnDocument(Map mapAlterReturnDocument) {
        this.mapAlterReturnDocument = mapAlterReturnDocument;
    }

    public boolean getFindShouldAlterReturnDocument() {
        return findShouldAlterReturnDocument.get();
    }

    public void setFindShouldAlterReturnDocument(boolean findShouldAlterReturnDocument) {
        this.findShouldAlterReturnDocument.set(findShouldAlterReturnDocument);
    }

    public boolean getFindAndUpdateShouldAlterReturnDocument() {
        return findAndUpdateShouldAlterReturnDocument.get();
    }

    public void setFindAndUpdateShouldAlterReturnDocument(boolean findAndUpdateShouldAlterReturnDocument) {
        this.findAndUpdateShouldAlterReturnDocument.set(findAndUpdateShouldAlterReturnDocument);
    }

    public int getFailBeforeUpdate() {
        return failBeforeUpdate.get();
    }

    public void setFailBeforeUpdate(int num) {
        failBeforeUpdate.set(num);
    }

    public int getFailAfterUpdate() {
        return failAfterUpdate.get();
    }

    public void setFailAfterUpdate(int num) {
        this.failAfterUpdate.set(num);
    }

    public long getDelayMillis() {
        return delayMillis;
    }

    public void setDelayMillis(long delayMillis) {
        this.delayMillis = delayMillis;
    }

    public long getDelayMillisOnce() {
        return delayMillisOnce;
    }

    public void setDelayMillisOnce(long delayMillisOnce) {
        this.delayMillisOnce = delayMillisOnce;
    }

    public int getFailFind() {
        return failFind.get();
    }

    public void setFailFind(int num) {
        this.failFind.set(num);
    }
}
}
|
googleapis/google-cloud-java | 36,271 | java-securitycenter/proto-google-cloud-securitycenter-v2/src/main/java/com/google/cloud/securitycenter/v2/UpdateSecurityMarksRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v2/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v2;
/**
*
*
* <pre>
* Request message for updating a SecurityMarks resource.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v2.UpdateSecurityMarksRequest}
*/
public final class UpdateSecurityMarksRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.UpdateSecurityMarksRequest)
UpdateSecurityMarksRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateSecurityMarksRequest.newBuilder() to construct.
// (generated code — constructors are private; instances come from the Builder)
private UpdateSecurityMarksRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}

private UpdateSecurityMarksRequest() {}

// Framework hook used by the protobuf runtime to create empty instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateSecurityMarksRequest();
}
// Descriptor for this message type, looked up from the generated service proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
        .internal_static_google_cloud_securitycenter_v2_UpdateSecurityMarksRequest_descriptor;
}

// Maps descriptor fields to the generated Java accessors (reflection support).
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
        .internal_static_google_cloud_securitycenter_v2_UpdateSecurityMarksRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest.class,
            com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest.Builder.class);
}
private int bitField0_;
public static final int SECURITY_MARKS_FIELD_NUMBER = 1;
private com.google.cloud.securitycenter.v2.SecurityMarks securityMarks_;
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the securityMarks field is set.
*/
@java.lang.Override
public boolean hasSecurityMarks() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The securityMarks.
*/
@java.lang.Override
public com.google.cloud.securitycenter.v2.SecurityMarks getSecurityMarks() {
return securityMarks_ == null
? com.google.cloud.securitycenter.v2.SecurityMarks.getDefaultInstance()
: securityMarks_;
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.securitycenter.v2.SecurityMarksOrBuilder getSecurityMarksOrBuilder() {
return securityMarks_ == null
? com.google.cloud.securitycenter.v2.SecurityMarks.getDefaultInstance()
: securityMarks_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // this message has no required fields, so it is always initialized
    memoizedIsInitialized = 1;
    return true;
}
// Serializes the message: only fields whose presence bit is set are written,
// followed by any unknown fields carried over from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getSecurityMarks());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the wire size of this message,
// mirroring the field-presence checks in writeTo().
@java.lang.Override
public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
        size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSecurityMarks());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
        size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}
// Structural equality: presence flags, both message fields, and unknown
// fields must all match. Non-UpdateSecurityMarksRequest objects defer to super.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest)) {
        return super.equals(obj);
    }
    com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest other =
        (com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest) obj;

    if (hasSecurityMarks() != other.hasSecurityMarks()) return false;
    if (hasSecurityMarks()) {
        if (!getSecurityMarks().equals(other.getSecurityMarks())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
        if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}
// Memoized hash consistent with equals(); mixes the descriptor, each present
// field (tagged by its field number), and the unknown fields, using the
// protobuf-generated prime multipliers.
@java.lang.Override
public int hashCode() {
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasSecurityMarks()) {
        hash = (37 * hash) + SECURITY_MARKS_FIELD_NUMBER;
        hash = (53 * hash) + getSecurityMarks().hashCode();
    }
    if (hasUpdateMask()) {
        hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
        hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
// Standard generated parse entry points. Byte-based sources go straight to
// PARSER; stream-based sources are wrapped so IO failures surface as
// IOException rather than InvalidProtocolBufferException.
public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}
// Builder factory methods: builders are always derived from the default
// instance (or from an existing message via mergeFrom for prototypes).
@java.lang.Override
public Builder newBuilderForType() {
    return newBuilder();
}

public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
    // avoid a redundant mergeFrom when this IS the default instance
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
}
/**
*
*
* <pre>
* Request message for updating a SecurityMarks resource.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v2.UpdateSecurityMarksRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.UpdateSecurityMarksRequest)
com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
.internal_static_google_cloud_securitycenter_v2_UpdateSecurityMarksRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
.internal_static_google_cloud_securitycenter_v2_UpdateSecurityMarksRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest.class,
com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest.Builder.class);
}
// Construct using com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest.newBuilder()
private Builder() {
    maybeForceBuilderInitialization();
}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
}

// Eagerly creates the nested field builders when the runtime requires it
// (alwaysUseFieldBuilders is set for nested-builder support).
private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getSecurityMarksFieldBuilder();
        getUpdateMaskFieldBuilder();
    }
}
// Resets the builder: clears all presence bits and releases both message
// fields and their nested builders (dispose() detaches parent callbacks).
@java.lang.Override
public Builder clear() {
    super.clear();
    bitField0_ = 0;
    securityMarks_ = null;
    if (securityMarksBuilder_ != null) {
        securityMarksBuilder_.dispose();
        securityMarksBuilder_ = null;
    }
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
    }
    return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto
.internal_static_google_cloud_securitycenter_v2_UpdateSecurityMarksRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest
getDefaultInstanceForType() {
return com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest.getDefaultInstance();
}
// build() enforces initialization; buildPartial() copies whichever fields
// have their presence bit set into the result message.
@java.lang.Override
public com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest build() {
    com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest result = buildPartial();
    if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
    }
    return result;
}

@java.lang.Override
public com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest buildPartial() {
    com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest result =
        new com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest(this);
    if (bitField0_ != 0) {
        buildPartial0(result);
    }
    onBuilt();
    return result;
}

// Transfers set fields to the result, preferring the nested builder's value
// over the raw field when one exists.
private void buildPartial0(
    com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
        result.securityMarks_ =
            securityMarksBuilder_ == null ? securityMarks_ : securityMarksBuilder_.build();
        to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest) {
return mergeFrom((com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another UpdateSecurityMarksRequest into this builder: only fields
// present in `other` are merged; the default instance is a no-op.
public Builder mergeFrom(com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest other) {
    if (other
        == com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest.getDefaultInstance())
        return this;
    if (other.hasSecurityMarks()) {
        mergeSecurityMarks(other.getSecurityMarks());
    }
    if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
}

@java.lang.Override
public final boolean isInitialized() {
    // no required fields -> a builder is always buildable
    return true;
}
// Wire-format parse loop: dispatches on each field tag until EOF (tag 0)
// or an end-group tag; unrecognized fields are preserved as unknown fields.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
    }
    try {
        boolean done = false;
        while (!done) {
            int tag = input.readTag();
            switch (tag) {
                case 0:
                    done = true;
                    break;
                case 10: // field 1 (security_marks), length-delimited
                    {
                        input.readMessage(getSecurityMarksFieldBuilder().getBuilder(), extensionRegistry);
                        bitField0_ |= 0x00000001;
                        break;
                    } // case 10
                case 18: // field 2 (update_mask), length-delimited
                    {
                        input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                        bitField0_ |= 0x00000002;
                        break;
                    } // case 18
                default:
                    {
                        if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                            done = true; // was an endgroup tag
                        }
                        break;
                    } // default:
            } // switch (tag)
        } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
    } finally {
        // propagate the change even when parsing aborts mid-message
        onChanged();
    } // finally
    return this;
}
private int bitField0_;
private com.google.cloud.securitycenter.v2.SecurityMarks securityMarks_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securitycenter.v2.SecurityMarks,
com.google.cloud.securitycenter.v2.SecurityMarks.Builder,
com.google.cloud.securitycenter.v2.SecurityMarksOrBuilder>
securityMarksBuilder_;
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the securityMarks field is set.
*/
public boolean hasSecurityMarks() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The securityMarks.
*/
public com.google.cloud.securitycenter.v2.SecurityMarks getSecurityMarks() {
if (securityMarksBuilder_ == null) {
return securityMarks_ == null
? com.google.cloud.securitycenter.v2.SecurityMarks.getDefaultInstance()
: securityMarks_;
} else {
return securityMarksBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSecurityMarks(com.google.cloud.securitycenter.v2.SecurityMarks value) {
if (securityMarksBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
securityMarks_ = value;
} else {
securityMarksBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSecurityMarks(
com.google.cloud.securitycenter.v2.SecurityMarks.Builder builderForValue) {
if (securityMarksBuilder_ == null) {
securityMarks_ = builderForValue.build();
} else {
securityMarksBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
/**
 * Merges {@code value} into the security_marks field: if a non-default value
 * is already present it is merged field-by-field, otherwise {@code value}
 * replaces it. With an active nested builder the merge is delegated to it.
 */
public Builder mergeSecurityMarks(com.google.cloud.securitycenter.v2.SecurityMarks value) {
    if (securityMarksBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && securityMarks_ != null
            && securityMarks_
                != com.google.cloud.securitycenter.v2.SecurityMarks.getDefaultInstance()) {
            getSecurityMarksBuilder().mergeFrom(value);
        } else {
            securityMarks_ = value;
        }
    } else {
        securityMarksBuilder_.mergeFrom(value);
    }
    // only flag presence if the field actually holds a value now
    if (securityMarks_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
    }
    return this;
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearSecurityMarks() {
bitField0_ = (bitField0_ & ~0x00000001);
securityMarks_ = null;
if (securityMarksBuilder_ != null) {
securityMarksBuilder_.dispose();
securityMarksBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.securitycenter.v2.SecurityMarks.Builder getSecurityMarksBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSecurityMarksFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.securitycenter.v2.SecurityMarksOrBuilder getSecurityMarksOrBuilder() {
if (securityMarksBuilder_ != null) {
return securityMarksBuilder_.getMessageOrBuilder();
} else {
return securityMarks_ == null
? com.google.cloud.securitycenter.v2.SecurityMarks.getDefaultInstance()
: securityMarks_;
}
}
/**
*
*
* <pre>
* Required. The security marks resource to update.
* </pre>
*
* <code>
* .google.cloud.securitycenter.v2.SecurityMarks security_marks = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securitycenter.v2.SecurityMarks,
com.google.cloud.securitycenter.v2.SecurityMarks.Builder,
com.google.cloud.securitycenter.v2.SecurityMarksOrBuilder>
getSecurityMarksFieldBuilder() {
if (securityMarksBuilder_ == null) {
securityMarksBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securitycenter.v2.SecurityMarks,
com.google.cloud.securitycenter.v2.SecurityMarks.Builder,
com.google.cloud.securitycenter.v2.SecurityMarksOrBuilder>(
getSecurityMarks(), getParentForChildren(), isClean());
securityMarks_ = null;
}
return securityMarksBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
/**
 * Merges {@code value} into the update_mask field, mirroring
 * mergeSecurityMarks(): merge into an existing non-default mask, otherwise
 * replace; delegate to the nested builder when one is active.
 */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
            getUpdateMaskBuilder().mergeFrom(value);
        } else {
            updateMask_ = value;
        }
    } else {
        updateMaskBuilder_.mergeFrom(value);
    }
    // only flag presence if the field actually holds a value now
    if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
    }
    return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The FieldMask to use when updating the security marks resource.
*
* The field mask must not contain duplicate fields.
* If empty or set to "marks", all marks will be replaced. Individual
* marks can be updated using "marks.<mark_key>".
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
    // Delegate unknown-field handling to the generated superclass; the
    // overrides exist so the fluent return type stays this concrete Builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.UpdateSecurityMarksRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.UpdateSecurityMarksRequest)
  // Singleton default instance shared by all consumers of this message type.
  private static final com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest();
  }
  public static com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser: delegates to Builder.mergeFrom and, on any failure, attaches
  // the partially parsed message so callers can inspect what was decoded.
  private static final com.google.protobuf.Parser<UpdateSecurityMarksRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateSecurityMarksRequest>() {
        @java.lang.Override
        public UpdateSecurityMarksRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Any other I/O problem is surfaced as a protobuf parse failure.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateSecurityMarksRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateSecurityMarksRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.securitycenter.v2.UpdateSecurityMarksRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop | 36,428 | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockSender.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.SocketException;
import java.net.SocketTimeoutException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.FsTracer;
import org.apache.hadoop.hdfs.DFSUtilClient;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
import org.apache.hadoop.hdfs.server.common.DataNodeLockManager.LockLevel;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeReference;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.LengthInputStream;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaInputStreams;
import org.apache.hadoop.hdfs.util.DataTransferThrottler;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.ReadaheadPool.ReadaheadRequest;
import org.apache.hadoop.net.SocketOutputStream;
import org.apache.hadoop.util.AutoCloseableLock;
import org.apache.hadoop.util.DataChecksum;
import org.apache.hadoop.tracing.TraceScope;
import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_DONTNEED;
import static org.apache.hadoop.io.nativeio.NativeIO.POSIX.POSIX_FADV_SEQUENTIAL;
import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import org.slf4j.Logger;
/**
* Reads a block from the disk and sends it to a recipient.
*
 * Data sent from the BlockSender in the following format:
* <br><b>Data format:</b> <pre>
* +--------------------------------------------------+
* | ChecksumHeader | Sequence of data PACKETS... |
* +--------------------------------------------------+
* </pre>
* <b>ChecksumHeader format:</b> <pre>
* +--------------------------------------------------+
* | 1 byte CHECKSUM_TYPE | 4 byte BYTES_PER_CHECKSUM |
* +--------------------------------------------------+
* </pre>
* An empty packet is sent to mark the end of block and read completion.
*
* PACKET Contains a packet header, checksum and data. Amount of data
* carried is set by BUFFER_SIZE.
* <pre>
* +-----------------------------------------------------+
* | Variable length header. See {@link PacketHeader} |
* +-----------------------------------------------------+
* | x byte checksum data. x is defined below |
* +-----------------------------------------------------+
* | actual data ...... |
* +-----------------------------------------------------+
*
* Data is made of Chunks. Each chunk is of length <= BYTES_PER_CHECKSUM.
* A checksum is calculated for each chunk.
*
* x = (length of data + BYTE_PER_CHECKSUM - 1)/BYTES_PER_CHECKSUM *
* CHECKSUM_SIZE
*
* CHECKSUM_SIZE depends on CHECKSUM_TYPE (usually, 4 for CRC32)
* </pre>
*
* The client reads data until it receives a packet with
* "LastPacketInBlock" set to true or with a zero length. If there is
* no checksum error, it replies to DataNode with OP_STATUS_CHECKSUM_OK.
*/
class BlockSender implements java.io.Closeable {
static final Logger LOG = DataNode.LOG;
static final Logger CLIENT_TRACE_LOG = DataNode.CLIENT_TRACE_LOG;
private static final boolean is32Bit =
System.getProperty("sun.arch.data.model").equals("32");
/**
* Minimum buffer used while sending data to clients. Used only if
* transferTo() is enabled. 64KB is not that large. It could be larger, but
* not sure if there will be much more improvement.
*/
private static final int MIN_BUFFER_WITH_TRANSFERTO = 64*1024;
private static final int IO_FILE_BUFFER_SIZE;
static {
HdfsConfiguration conf = new HdfsConfiguration();
IO_FILE_BUFFER_SIZE = DFSUtilClient.getIoFileBufferSize(conf);
}
private static final int TRANSFERTO_BUFFER_SIZE = Math.max(
IO_FILE_BUFFER_SIZE, MIN_BUFFER_WITH_TRANSFERTO);
/** the block to read from */
private final ExtendedBlock block;
/** InputStreams and file descriptors to read block/checksum. */
private ReplicaInputStreams ris;
/** updated while using transferTo() */
private long blockInPosition = -1;
/** Checksum utility */
private final DataChecksum checksum;
/** Initial position to read */
private long initialOffset;
/** Current position of read */
private long offset;
/** Position of last byte to read from block file */
private final long endOffset;
/** Number of bytes in chunk used for computing checksum */
private final int chunkSize;
/** Number bytes of checksum computed for a chunk */
private final int checksumSize;
/** If true, failure to read checksum is ignored */
private final boolean corruptChecksumOk;
/** Sequence number of packet being sent */
private long seqno;
/** Set to true if transferTo is allowed for sending data to the client */
private final boolean transferToAllowed;
/** Set to true once entire requested byte range has been sent to the client */
private boolean sentEntireByteRange;
/** When true, verify checksum while reading from checksum file */
private final boolean verifyChecksum;
/** Format used to print client trace log messages */
private final String clientTraceFmt;
private volatile ChunkChecksum lastChunkChecksum = null;
private DataNode datanode;
/** The replica of the block that is being read. */
private final Replica replica;
// Cache-management related fields
private final long readaheadLength;
private ReadaheadRequest curReadahead;
private final boolean alwaysReadahead;
private final boolean dropCacheBehindLargeReads;
private final boolean dropCacheBehindAllReads;
private long lastCacheDropOffset;
private final FileIoProvider fileIoProvider;
@VisibleForTesting
static long CACHE_DROP_INTERVAL_BYTES = 1024 * 1024; // 1MB
/**
* See {{@link BlockSender#isLongRead()}
*/
private static final long LONG_READ_THRESHOLD_BYTES = 256 * 1024;
// The number of bytes per checksum here determines the alignment
// of reads: we always start reading at a checksum chunk boundary,
// even if the checksum type is NULL. So, choosing too big of a value
// would risk sending too much unnecessary data. 512 (1 disk sector)
// is likely to result in minimal extra IO.
private static final long CHUNK_SIZE = 512;
private static final String EIO_ERROR = "Input/output error";
  /**
   * Constructor
   *
   * @param block Block that is being read
   * @param startOffset starting offset to read from
   * @param length length of data to read
   * @param corruptChecksumOk if true, corrupt checksum is okay
   * @param verifyChecksum verify checksum while reading the data
   * @param sendChecksum send checksum to client.
   * @param datanode datanode from which the block is being read
   * @param clientTraceFmt format string used to print client trace logs
   * @param cachingStrategy client-supplied readahead/drop-behind hints; null
   *          sub-fields fall back to the DataNode's configured defaults
   * @throws IOException
   */
  BlockSender(ExtendedBlock block, long startOffset, long length,
      boolean corruptChecksumOk, boolean verifyChecksum,
      boolean sendChecksum, DataNode datanode, String clientTraceFmt,
      CachingStrategy cachingStrategy)
      throws IOException {
    InputStream blockIn = null;
    DataInputStream checksumIn = null;
    FsVolumeReference volumeRef = null;
    this.fileIoProvider = datanode.getFileIoProvider();
    try {
      this.block = block;
      this.corruptChecksumOk = corruptChecksumOk;
      this.verifyChecksum = verifyChecksum;
      this.clientTraceFmt = clientTraceFmt;
      /*
       * If the client asked for the cache to be dropped behind all reads,
       * we honor that.  Otherwise, we use the DataNode defaults.
       * When using DataNode defaults, we use a heuristic where we only
       * drop the cache for large reads.
       */
      if (cachingStrategy.getDropBehind() == null) {
        this.dropCacheBehindAllReads = false;
        this.dropCacheBehindLargeReads =
            datanode.getDnConf().dropCacheBehindReads;
      } else {
        this.dropCacheBehindAllReads =
            this.dropCacheBehindLargeReads =
                cachingStrategy.getDropBehind().booleanValue();
      }
      /*
       * Similarly, if readahead was explicitly requested, we always do it.
       * Otherwise, we read ahead based on the DataNode settings, and only
       * when the reads are large.
       */
      if (cachingStrategy.getReadahead() == null) {
        this.alwaysReadahead = false;
        this.readaheadLength = datanode.getDnConf().readaheadLength;
      } else {
        this.alwaysReadahead = true;
        this.readaheadLength = cachingStrategy.getReadahead().longValue();
      }
      this.datanode = datanode;
      if (verifyChecksum) {
        // To simplify implementation, callers may not specify verification
        // without sending.
        Preconditions.checkArgument(sendChecksum,
            "If verifying checksum, currently must also send it.");
      }
      // if there is a append write happening right after the BlockSender
      // is constructed, the last partial checksum maybe overwritten by the
      // append, the BlockSender need to use the partial checksum before
      // the append write.
      ChunkChecksum chunkChecksum = null;
      final long replicaVisibleLength;
      // Snapshot replica + visible length under the block-pool read lock so
      // they are mutually consistent.
      try (AutoCloseableLock lock = datanode.getDataSetLockManager().readLock(
          LockLevel.BLOCK_POOl, block.getBlockPoolId())) {
        replica = getReplica(block, datanode);
        replicaVisibleLength = replica.getVisibleLength();
      }
      if (replica.getState() == ReplicaState.RBW) {
        // Replica is still being written: wait until at least the requested
        // range is durable before reading it.
        final ReplicaInPipeline rbw = (ReplicaInPipeline) replica;
        rbw.waitForMinLength(startOffset + length, 3, TimeUnit.SECONDS);
        chunkChecksum = rbw.getLastChecksumAndDataLen();
      }
      if (replica instanceof FinalizedReplica) {
        chunkChecksum = getPartialChunkChecksumForFinalized(
            (FinalizedReplica)replica);
      }
      if (replica.getGenerationStamp() < block.getGenerationStamp()) {
        throw new IOException("Replica gen stamp < block genstamp, block="
            + block + ", replica=" + replica);
      } else if (replica.getGenerationStamp() > block.getGenerationStamp()) {
        if (DataNode.LOG.isDebugEnabled()) {
          DataNode.LOG.debug("Bumping up the client provided"
              + " block's genstamp to latest " + replica.getGenerationStamp()
              + " for block " + block);
        }
        block.setGenerationStamp(replica.getGenerationStamp());
      }
      if (replicaVisibleLength < 0) {
        throw new IOException("Replica is not readable, block="
            + block + ", replica=" + replica);
      }
      if (DataNode.LOG.isDebugEnabled()) {
        DataNode.LOG.debug("block=" + block + ", replica=" + replica);
      }
      // transferToFully() fails on 32 bit platforms for block sizes >= 2GB,
      // use normal transfer in those cases
      this.transferToAllowed = datanode.getDnConf().transferToAllowed &&
          (!is32Bit || length <= Integer.MAX_VALUE);
      // Obtain a reference before reading data
      FsVolumeSpi volume = datanode.data.getVolume(block);
      if (volume == null) {
        LOG.warn("Cannot find FsVolumeSpi to obtain a reference for block: {}", block);
        throw new ReplicaNotFoundException(block);
      }
      volumeRef = volume.obtainReference();
      /*
       * (corruptChecksumOK, meta_file_exist): operation
       * True,   True: will verify checksum
       * True,  False: No verify, e.g., need to read data from a corrupted file
       * False,  True: will verify checksum
       * False, False: throws IOException file not found
       */
      DataChecksum csum = null;
      if (verifyChecksum || sendChecksum) {
        LengthInputStream metaIn = null;
        boolean keepMetaInOpen = false;
        try {
          DataNodeFaultInjector.get().throwTooManyOpenFiles();
          metaIn = datanode.data.getMetaDataInputStream(block);
          if (!corruptChecksumOk || metaIn != null) {
            if (metaIn == null) {
              //need checksum but meta-data not found
              throw new FileNotFoundException("Meta-data not found for " +
                  block);
            }
            // The meta file will contain only the header if the NULL checksum
            // type was used, or if the replica was written to transient storage.
            // Also, when only header portion of a data packet was transferred
            // and then pipeline breaks, the meta file can contain only the
            // header and 0 byte in the block data file.
            // Checksum verification is not performed for replicas on transient
            // storage.  The header is important for determining the checksum
            // type later when lazy persistence copies the block to non-transient
            // storage and computes the checksum.
            int expectedHeaderSize = BlockMetadataHeader.getHeaderSize();
            if (!replica.isOnTransientStorage() &&
                metaIn.getLength() >= expectedHeaderSize) {
              checksumIn = new DataInputStream(new BufferedInputStream(
                  metaIn, IO_FILE_BUFFER_SIZE));
              csum = BlockMetadataHeader.readDataChecksum(checksumIn, block);
              keepMetaInOpen = true;
            } else if (!replica.isOnTransientStorage() &&
                metaIn.getLength() < expectedHeaderSize) {
              LOG.warn("The meta file length {} is less than the expected " +
                  "header length {}, indicating the meta file is corrupt",
                  metaIn.getLength(), expectedHeaderSize);
              throw new CorruptMetaHeaderException("The meta file length "+
                  metaIn.getLength()+" is less than the expected length "+
                  expectedHeaderSize);
            }
          } else {
            LOG.warn("Could not find metadata file for " + block);
          }
        } catch (FileNotFoundException e) {
          if ((e.getMessage() != null) && !(e.getMessage()
              .contains("Too many open files"))) {
            // Meta file genuinely missing (not an fd-exhaustion artifact):
            // tell the dataset layer so the block is re-reported as missing.
            datanode.data.invalidateMissingBlock(block.getBlockPoolId(),
                block.getLocalBlock());
          }
          throw e;
        } finally {
          if (!keepMetaInOpen) {
            IOUtils.closeStream(metaIn);
          }
        }
      }
      if (csum == null) {
        // No checksum requested/available: use a NULL checksum so the packet
        // framing below still has a well-defined chunk size.
        csum = DataChecksum.newDataChecksum(DataChecksum.Type.NULL,
            (int)CHUNK_SIZE);
      }
      /*
       * If chunkSize is very large, then the metadata file is mostly
       * corrupted. For now just truncate bytesPerchecksum to blockLength.
       */
      int size = csum.getBytesPerChecksum();
      if (size > 10*1024*1024 && size > replicaVisibleLength) {
        csum = DataChecksum.newDataChecksum(csum.getChecksumType(),
            Math.max((int)replicaVisibleLength, 10*1024*1024));
        size = csum.getBytesPerChecksum();
      }
      chunkSize = size;
      checksum = csum;
      checksumSize = checksum.getChecksumSize();
      length = length < 0 ? replicaVisibleLength : length;
      // end is either last byte on disk or the length for which we have a
      // checksum
      long end = chunkChecksum != null ? chunkChecksum.getDataLength()
          : replica.getBytesOnDisk();
      if (startOffset < 0 || startOffset > end
          || (length + startOffset) > end) {
        String msg = " Offset " + startOffset + " and length " + length
            + " don't match block " + block + " ( blockLen " + end + " )";
        LOG.warn(datanode.getDNRegistrationForBP(block.getBlockPoolId()) +
            ":sendBlock() : " + msg);
        throw new IOException(msg);
      }
      // Ensure read offset is position at the beginning of chunk
      offset = startOffset - (startOffset % chunkSize);
      if (length >= 0) {
        // Ensure endOffset points to end of chunk.
        long tmpLen = startOffset + length;
        if (tmpLen % chunkSize != 0) {
          tmpLen += (chunkSize - tmpLen % chunkSize);
        }
        if (tmpLen < end) {
          // will use on-disk checksum here since the end is a stable chunk
          end = tmpLen;
        } else if (chunkChecksum != null) {
          // last chunk is changing. flag that we need to use in-memory checksum
          this.lastChunkChecksum = chunkChecksum;
        }
      }
      endOffset = end;
      // seek to the right offsets
      if (offset > 0 && checksumIn != null) {
        long checksumSkip = (offset / chunkSize) * checksumSize;
        // note blockInStream is seeked when created below
        if (checksumSkip > 0) {
          // Should we use seek() for checksum file as well?
          IOUtils.skipFully(checksumIn, checksumSkip);
        }
      }
      seqno = 0;
      if (DataNode.LOG.isDebugEnabled()) {
        DataNode.LOG.debug("replica=" + replica);
      }
      blockIn = datanode.data.getBlockInputStream(block, offset); // seek to offset
      // ris takes ownership of blockIn, checksumIn and the volume reference.
      ris = new ReplicaInputStreams(
          blockIn, checksumIn, volumeRef, fileIoProvider);
    } catch (Throwable t) {
      // Failure during construction: release everything acquired so far.
      IOUtils.cleanupWithLogger(null, volumeRef);
      IOUtils.closeStream(this);
      IOUtils.closeStream(blockIn);
      IOUtils.closeStream(checksumIn);
      throw t;
    }
  }
private ChunkChecksum getPartialChunkChecksumForFinalized(
FinalizedReplica finalized) throws IOException {
// There are a number of places in the code base where a finalized replica
// object is created. If last partial checksum is loaded whenever a
// finalized replica is created, it would increase latency in DataNode
// initialization. Therefore, the last partial chunk checksum is loaded
// lazily.
// Load last checksum in case the replica is being written concurrently
final long replicaVisibleLength = replica.getVisibleLength();
if (replicaVisibleLength % CHUNK_SIZE != 0 &&
finalized.getLastPartialChunkChecksum() == null) {
// the finalized replica does not have precomputed last partial
// chunk checksum. Recompute now.
try {
finalized.loadLastPartialChunkChecksum();
return new ChunkChecksum(finalized.getVisibleLength(),
finalized.getLastPartialChunkChecksum());
} catch (FileNotFoundException e) {
// meta file is lost. Continue anyway to preserve existing behavior.
DataNode.LOG.warn(
"meta file " + finalized.getMetaFile() + " is missing!");
return null;
}
} else {
// If the checksum is null, BlockSender will use on-disk checksum.
return new ChunkChecksum(finalized.getVisibleLength(),
finalized.getLastPartialChunkChecksum());
}
}
  /**
   * close opened files.
   * Also performs a final drop-behind of the page cache for the byte range
   * read (when configured) and cancels any outstanding readahead request.
   */
  @Override
  public void close() throws IOException {
    if (ris.getDataInFd() != null &&
        ((dropCacheBehindAllReads) ||
         (dropCacheBehindLargeReads && isLongRead()))) {
      try {
        // Drop the tail of the range that manageOsCache() has not yet dropped.
        ris.dropCacheBehindReads(block.getBlockName(), lastCacheDropOffset,
            offset - lastCacheDropOffset, POSIX_FADV_DONTNEED);
      } catch (Exception e) {
        // Best-effort cache hint; never let it fail the close.
        LOG.warn("Unable to drop cache on file close", e);
      }
    }
    if (curReadahead != null) {
      curReadahead.cancel();
    }
    try {
      ris.closeStreams();
    } finally {
      // closeStream() is a null-safe backstop in case closeStreams() threw.
      IOUtils.closeStream(ris);
      ris = null;
    }
  }
private static Replica getReplica(ExtendedBlock block, DataNode datanode)
throws ReplicaNotFoundException {
Replica replica = datanode.data.getReplica(block.getBlockPoolId(),
block.getBlockId());
if (replica == null) {
throw new ReplicaNotFoundException(block);
}
return replica;
}
/**
* Converts an IOExcpetion (not subclasses) to SocketException.
* This is typically done to indicate to upper layers that the error
* was a socket error rather than often more serious exceptions like
* disk errors.
*/
private static IOException ioeToSocketException(IOException ioe) {
if (ioe.getClass().equals(IOException.class)) {
// "se" could be a new class in stead of SocketException.
IOException se = new SocketException("Original Exception : " + ioe);
se.initCause(ioe);
/* Change the stacktrace so that original trace is not truncated
* when printed.*/
se.setStackTrace(ioe.getStackTrace());
return se;
}
// otherwise just return the same exception.
return ioe;
}
/**
* @param datalen Length of data
* @return number of chunks for data of given size
*/
private int numberOfChunks(long datalen) {
return (int) ((datalen + chunkSize - 1)/chunkSize);
}
  /**
   * Sends a packet with up to maxChunks chunks of data.
   *
   * @param pkt buffer used for writing packet data
   * @param maxChunks maximum number of chunks to send
   * @param out stream to send data to
   * @param transferTo use transferTo to send data
   * @param throttler used for throttling data transfer bandwidth
   * @return number of data bytes sent in this packet (0 for the trailer
   *     packet that marks end of block)
   * @throws IOException on read or network error; plain IOExceptions from the
   *     network path are converted to SocketException
   */
  private int sendPacket(ByteBuffer pkt, int maxChunks, OutputStream out,
      boolean transferTo, DataTransferThrottler throttler) throws IOException {
    int dataLen = (int) Math.min(endOffset - offset,
        (chunkSize * (long) maxChunks));
    int numChunks = numberOfChunks(dataLen); // Number of chunks be sent in the packet
    int checksumDataLen = numChunks * checksumSize;
    int packetLen = dataLen + checksumDataLen + 4;
    boolean lastDataPacket = offset + dataLen == endOffset && dataLen > 0;
    // The packet buffer is organized as follows:
    // _______HHHHCCCCD?D?D?D?
    //        ^   ^
    //        |   \ checksumOff
    //        \ headerOff
    // _ padding, since the header is variable-length
    // H = header and length prefixes
    // C = checksums
    // D? = data, if transferTo is false.
    int headerLen = writePacketHeader(pkt, dataLen, packetLen);
    // Per above, the header doesn't start at the beginning of the
    // buffer
    int headerOff = pkt.position() - headerLen;
    int checksumOff = pkt.position();
    byte[] buf = pkt.array();
    if (checksumSize > 0 && ris.getChecksumIn() != null) {
      readChecksum(buf, checksumOff, checksumDataLen);
      // write in progress that we need to use to get last checksum
      if (lastDataPacket && lastChunkChecksum != null) {
        // Overwrite the final (possibly stale on-disk) checksum with the
        // in-memory checksum captured at construction time.
        int start = checksumOff + checksumDataLen - checksumSize;
        byte[] updatedChecksum = lastChunkChecksum.getChecksum();
        if (updatedChecksum != null) {
          System.arraycopy(updatedChecksum, 0, buf, start, checksumSize);
        }
      }
    }
    int dataOff = checksumOff + checksumDataLen;
    if (!transferTo) { // normal transfer
      try {
        ris.readDataFully(buf, dataOff, dataLen);
      } catch (IOException ioe) {
        // EIO from the local disk indicates likely media corruption.
        if (ioe.getMessage().startsWith(EIO_ERROR)) {
          throw new DiskFileCorruptException("A disk IO error occurred", ioe);
        }
        throw ioe;
      }
      if (verifyChecksum) {
        verifyChecksum(buf, dataOff, dataLen, numChunks, checksumOff);
      }
    }
    try {
      if (transferTo) {
        SocketOutputStream sockOut = (SocketOutputStream)out;
        // First write header and checksums
        sockOut.write(buf, headerOff, dataOff - headerOff);
        // no need to flush since we know out is not a buffered stream
        FileChannel fileCh = ((FileInputStream)ris.getDataIn()).getChannel();
        LongWritable waitTime = new LongWritable();
        LongWritable transferTime = new LongWritable();
        // Zero-copy path: ship the data bytes kernel-to-kernel.
        fileIoProvider.transferToSocketFully(
            ris.getVolumeRef().getVolume(), sockOut, fileCh, blockInPosition,
            dataLen, waitTime, transferTime);
        datanode.metrics.addSendDataPacketBlockedOnNetworkNanos(waitTime.get());
        datanode.metrics.addSendDataPacketTransferNanos(transferTime.get());
        blockInPosition += dataLen;
      } else {
        // normal transfer
        out.write(buf, headerOff, dataOff + dataLen - headerOff);
      }
    } catch (IOException e) {
      if (e instanceof SocketTimeoutException) {
        /*
         * writing to client timed out.  This happens if the client reads
         * part of a block and then decides not to read the rest (but leaves
         * the socket open).
         *
         * Reporting of this case is done in DataXceiver#run
         */
        LOG.warn("Sending packets timed out.", e);
      } else {
        /* Exception while writing to the client. Connection closure from
         * the other end is mostly the case and we do not care much about
         * it. But other things can go wrong, especially in transferTo(),
         * which we do not want to ignore.
         *
         * The message parsing below should not be considered as a good
         * coding example. NEVER do it to drive a program logic. NEVER.
         * It was done here because the NIO throws an IOException for EPIPE.
         */
        String ioem = e.getMessage();
        if (ioem != null) {
          /*
           * If we got an EIO when reading files or transferTo the client
           * socket, it's very likely caused by bad disk track or other file
           * corruptions.
           */
          if (ioem.startsWith(EIO_ERROR)) {
            throw new DiskFileCorruptException("A disk IO error occurred", e);
          }
          String causeMessage = e.getCause() != null ? e.getCause().getMessage() : "";
          causeMessage = causeMessage != null ? causeMessage : "";
          if (!ioem.startsWith("Broken pipe")
              && !ioem.startsWith("Connection reset")
              && !causeMessage.startsWith("Broken pipe")
              && !causeMessage.startsWith("Connection reset")) {
            LOG.error("BlockSender.sendChunks() exception: ", e);
            // Unexpected network-path failure: flag the block for rescan.
            datanode.getBlockScanner().markSuspectBlock(
                ris.getVolumeRef().getVolume().getStorageID(), block);
          }
        }
      }
      throw ioeToSocketException(e);
    }
    if (throttler != null) { // rebalancing so throttle
      throttler.throttle(packetLen);
    }
    return dataLen;
  }
  /**
   * Read checksum into given buffer
   * @param buf buffer to read the checksum into
   * @param checksumOffset offset at which to write the checksum into buf
   * @param checksumLen length of checksum to write
   * @throws IOException on error (only when corruptChecksumOk is false;
   *     otherwise the checksum region is zero-filled and the error swallowed)
   */
  private void readChecksum(byte[] buf, final int checksumOffset,
      final int checksumLen) throws IOException {
    // NOTE(review): guard uses && (both conditions), while the caller in
    // sendPacket() only invokes this when checksumSize > 0 AND a checksum
    // stream exists — so this early return is effectively dead there; kept
    // as-is to preserve behavior for any other caller. TODO confirm intent.
    if (checksumSize <= 0 && ris.getChecksumIn() == null) {
      return;
    }
    try {
      ris.readChecksumFully(buf, checksumOffset, checksumLen);
    } catch (IOException e) {
      LOG.warn(" Could not read or failed to verify checksum for data"
          + " at offset " + offset + " for block " + block, e);
      // Stop using the (possibly corrupt) checksum stream for the rest of
      // this send.
      ris.closeChecksumStream();
      if (corruptChecksumOk) {
        if (checksumLen > 0) {
          // Just fill the array with zeros.
          Arrays.fill(buf, checksumOffset, checksumOffset + checksumLen,
              (byte) 0);
        }
      } else {
        throw e;
      }
    }
  }
/**
* Compute checksum for chunks and verify the checksum that is read from
* the metadata file is correct.
*
* @param buf buffer that has checksum and data
* @param dataOffset position where data is written in the buf
* @param datalen length of data
* @param numChunks number of chunks corresponding to data
* @param checksumOffset offset where checksum is written in the buf
* @throws ChecksumException on failed checksum verification
*/
public void verifyChecksum(final byte[] buf, final int dataOffset,
final int datalen, final int numChunks, final int checksumOffset)
throws ChecksumException {
int dOff = dataOffset;
int cOff = checksumOffset;
int dLeft = datalen;
for (int i = 0; i < numChunks; i++) {
checksum.reset();
int dLen = Math.min(dLeft, chunkSize);
checksum.update(buf, dOff, dLen);
if (!checksum.compare(buf, cOff)) {
long failedPos = offset + datalen - dLeft;
StringBuilder replicaInfoString = new StringBuilder();
if (replica != null) {
replicaInfoString.append(" for replica: " + replica.toString());
}
throw new ChecksumException("Checksum failed at " + failedPos
+ replicaInfoString, failedPos);
}
dLeft -= dLen;
dOff += dLen;
cOff += checksumSize;
}
}
/**
* sendBlock() is used to read block and its metadata and stream the data to
* either a client or to another datanode.
*
* @param out stream to which the block is written to
* @param baseStream optional. if non-null, <code>out</code> is assumed to
* be a wrapper over this stream. This enables optimizations for
* sending the data, e.g.
* {@link SocketOutputStream#transferToFully(FileChannel,
* long, int)}.
* @param throttler for sending data.
* @return total bytes read, including checksum data.
*/
long sendBlock(DataOutputStream out, OutputStream baseStream,
DataTransferThrottler throttler) throws IOException {
final TraceScope scope = FsTracer.get(null)
.newScope("sendBlock_" + block.getBlockId());
try {
return doSendBlock(out, baseStream, throttler);
} finally {
scope.close();
}
}
  // Core send loop: streams packets until the requested range is exhausted or
  // the thread is interrupted, then sends a zero-length trailer packet.
  // Always closes this BlockSender before returning.
  private long doSendBlock(DataOutputStream out, OutputStream baseStream,
      DataTransferThrottler throttler) throws IOException {
    if (out == null) {
      throw new IOException( "out stream is null" );
    }
    initialOffset = offset;
    long totalRead = 0;
    OutputStream streamForSendChunks = out;
    lastCacheDropOffset = initialOffset;
    if (isLongRead() && ris.getDataInFd() != null) {
      // Advise that this file descriptor will be accessed sequentially.
      ris.dropCacheBehindReads(block.getBlockName(), 0, 0,
          POSIX_FADV_SEQUENTIAL);
    }
    // Trigger readahead of beginning of file if configured.
    manageOsCache();
    final long startTime = CLIENT_TRACE_LOG.isDebugEnabled() ? System.nanoTime() : 0;
    try {
      int maxChunksPerPacket;
      int pktBufSize = PacketHeader.PKT_MAX_HEADER_LEN;
      // transferTo (zero-copy) only applies when we are not verifying
      // checksums and both endpoints expose raw channels.
      boolean transferTo = transferToAllowed && !verifyChecksum
          && baseStream instanceof SocketOutputStream
          && ris.getDataIn() instanceof FileInputStream;
      if (transferTo) {
        FileChannel fileChannel =
            ((FileInputStream)ris.getDataIn()).getChannel();
        blockInPosition = fileChannel.position();
        streamForSendChunks = baseStream;
        maxChunksPerPacket = numberOfChunks(TRANSFERTO_BUFFER_SIZE);
        // Smaller packet size to only hold checksum when doing transferTo
        pktBufSize += checksumSize * maxChunksPerPacket;
      } else {
        maxChunksPerPacket = Math.max(1,
            numberOfChunks(IO_FILE_BUFFER_SIZE));
        // Packet size includes both checksum and data
        pktBufSize += (chunkSize + checksumSize) * maxChunksPerPacket;
      }
      ByteBuffer pktBuf = ByteBuffer.allocate(pktBufSize);
      while (endOffset > offset && !Thread.currentThread().isInterrupted()) {
        manageOsCache();
        long len = sendPacket(pktBuf, maxChunksPerPacket, streamForSendChunks,
            transferTo, throttler);
        offset += len;
        totalRead += len + (numberOfChunks(len) * checksumSize);
        seqno++;
      }
      // If this thread was interrupted, then it did not send the full block.
      if (!Thread.currentThread().isInterrupted()) {
        try {
          // send an empty packet to mark the end of the block
          sendPacket(pktBuf, maxChunksPerPacket, streamForSendChunks, transferTo,
              throttler);
          out.flush();
        } catch (IOException e) { //socket error
          throw ioeToSocketException(e);
        }
        sentEntireByteRange = true;
      }
    } finally {
      if ((clientTraceFmt != null) && CLIENT_TRACE_LOG.isDebugEnabled()) {
        final long endTime = System.nanoTime();
        CLIENT_TRACE_LOG.debug(String.format(clientTraceFmt, totalRead,
            initialOffset, endTime - startTime));
      }
      close();
    }
    return totalRead;
  }
/**
 * Manage the OS buffer cache by performing read-ahead
 * and drop-behind, based on the current {@code offset}.
 */
private void manageOsCache() throws IOException {
  // We can't manage the cache for this block if we don't have a file
  // descriptor to work with.
  if (ris.getDataInFd() == null) {
    return;
  }
  // Perform readahead if necessary
  // (readahead is forced on, or this qualifies as a long read).
  if ((readaheadLength > 0) && (datanode.readaheadPool != null) &&
      (alwaysReadahead || isLongRead())) {
    curReadahead = datanode.readaheadPool.readaheadStream(
        clientTraceFmt, ris.getDataInFd(), offset, readaheadLength,
        Long.MAX_VALUE, curReadahead);
  }
  // Drop what we've just read from cache, since we aren't
  // likely to need it again
  if (dropCacheBehindAllReads ||
      (dropCacheBehindLargeReads && isLongRead())) {
    // Issue the drop at most once per CACHE_DROP_INTERVAL_BYTES of progress
    // to amortize the cost of the call.
    long nextCacheDropOffset = lastCacheDropOffset + CACHE_DROP_INTERVAL_BYTES;
    if (offset >= nextCacheDropOffset) {
      long dropLength = offset - lastCacheDropOffset;
      ris.dropCacheBehindReads(block.getBlockName(), lastCacheDropOffset,
          dropLength, POSIX_FADV_DONTNEED);
      lastCacheDropOffset = offset;
    }
  }
}
/**
 * Whether the requested transfer is large enough to opt into the
 * DataNode-wide cache-management defaults (readahead / drop-behind). Short
 * reads skip these because the overhead would outweigh the benefit; an
 * explicit client dropBehind request is honored regardless of length.
 *
 * Also used to decide when to issue posix_fadvise(POSIX_FADV_SEQUENTIAL).
 */
private boolean isLongRead() {
  final long requestedLength = endOffset - initialOffset;
  return requestedLength > LONG_READ_THRESHOLD_BYTES;
}
/**
 * Serializes a packet header into the leading region of {@code pkt}.
 * The header is right-aligned so that it ends exactly at
 * {@code PacketHeader.PKT_MAX_HEADER_LEN}, immediately before the payload.
 *
 * @param pkt destination buffer; cleared before writing.
 * @param dataLen number of data bytes in this packet; 0 marks the last packet.
 * @param packetLen packet length value recorded in the header.
 * @return the number of header bytes written.
 */
private int writePacketHeader(ByteBuffer pkt, int dataLen, int packetLen) {
  pkt.clear();
  // syncBlock and syncPacket are both false; a zero dataLen flags the
  // end-of-block packet.
  final boolean lastPacketInBlock = (dataLen == 0);
  final PacketHeader header = new PacketHeader(packetLen, offset, seqno,
      lastPacketInBlock, dataLen, false);
  final int headerLen = header.getSerializedSize();
  pkt.position(PacketHeader.PKT_MAX_HEADER_LEN - headerLen);
  header.putInBuffer(pkt);
  return headerLen;
}
/**
 * @return true if the sender transmitted the complete requested byte range;
 * set only after the final empty end-of-block packet was sent without the
 * thread being interrupted.
 */
boolean didSendEntireByteRange() {
  return sentEntireByteRange;
}
/**
 * Returns the checksum implementation in effect for the data being sent.
 *
 * @return the checksum type that will be used with this block transfer.
 */
DataChecksum getChecksum() {
  return checksum;
}
/**
 * @return the offset into the block file where the sender is currently
 * reading; it advances by the packet length after each packet is sent.
 */
long getOffset() {
  return offset;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/vizier_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* This message will be placed in the metadata field of a
* google.longrunning.Operation associated with a CheckTrialEarlyStoppingState
* request.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata}
*/
public final class CheckTrialEarlyStoppingStateMetatdata
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata)
CheckTrialEarlyStoppingStateMetatdataOrBuilder {
private static final long serialVersionUID = 0L;
// Use CheckTrialEarlyStoppingStateMetatdata.newBuilder() to construct.
private CheckTrialEarlyStoppingStateMetatdata(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance; string fields default to
// the empty string per proto3 semantics.
private CheckTrialEarlyStoppingStateMetatdata() {
  study_ = "";
  trial_ = "";
}

// Runtime hook used by the protobuf library to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CheckTrialEarlyStoppingStateMetatdata();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.VizierServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CheckTrialEarlyStoppingStateMetatdata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.VizierServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CheckTrialEarlyStoppingStateMetatdata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata.class,
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata.Builder
.class);
}
private int bitField0_;
public static final int GENERIC_METADATA_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_;
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return Whether the genericMetadata field is set.
*/
@java.lang.Override
public boolean hasGenericMetadata() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return The genericMetadata.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder
getGenericMetadataOrBuilder() {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
}
public static final int STUDY_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object study_ = "";
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @return The study.
*/
@java.lang.Override
public java.lang.String getStudy() {
java.lang.Object ref = study_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
study_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @return The bytes for study.
*/
@java.lang.Override
public com.google.protobuf.ByteString getStudyBytes() {
java.lang.Object ref = study_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
study_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TRIAL_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object trial_ = "";
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @return The trial.
*/
@java.lang.Override
public java.lang.String getTrial() {
java.lang.Object ref = trial_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
trial_ = s;
return s;
}
}
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @return The bytes for trial.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTrialBytes() {
java.lang.Object ref = trial_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
trial_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1 (generic_metadata) is written only when explicitly set.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getGenericMetadata());
  }
  // Proto3 string fields are omitted from the wire when empty.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(study_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, study_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(trial_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, trial_);
  }
  // Preserve any fields this binary does not know about.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // -1 is the "not yet computed" sentinel for the memoized size.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // Size computation mirrors writeTo(): fields are counted only when they
  // would actually be written.
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(study_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, study_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(trial_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, trial_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Values of other types defer to the superclass implementation.
  if (!(obj
      instanceof com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata other =
      (com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata) obj;
  // generic_metadata must agree on presence, and on value when present.
  if (hasGenericMetadata() != other.hasGenericMetadata()) return false;
  if (hasGenericMetadata()) {
    if (!getGenericMetadata().equals(other.getGenericMetadata())) return false;
  }
  if (!getStudy().equals(other.getStudy())) return false;
  if (!getTrial().equals(other.getTrial())) return false;
  // Unknown fields participate in equality as well.
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Mix each field's number and value, mirroring the fields checked by
  // equals() so the equals/hashCode contract holds.
  if (hasGenericMetadata()) {
    hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER;
    hash = (53 * hash) + getGenericMetadata().hashCode();
  }
  hash = (37 * hash) + STUDY_FIELD_NUMBER;
  hash = (53 * hash) + getStudy().hashCode();
  hash = (37 * hash) + TRIAL_FIELD_NUMBER;
  hash = (53 * hash) + getTrial().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* This message will be placed in the metadata field of a
* google.longrunning.Operation associated with a CheckTrialEarlyStoppingState
* request.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata)
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.VizierServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CheckTrialEarlyStoppingStateMetatdata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.VizierServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CheckTrialEarlyStoppingStateMetatdata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata.class,
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata.Builder
.class);
}
// Construct using
// com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getGenericMetadataFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
genericMetadata_ = null;
if (genericMetadataBuilder_ != null) {
genericMetadataBuilder_.dispose();
genericMetadataBuilder_ = null;
}
study_ = "";
trial_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.VizierServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CheckTrialEarlyStoppingStateMetatdata_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata build() {
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
buildPartial() {
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata result =
new com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies builder state into the message under construction, translating the
// builder's "field was set" bits into the message's presence bits (only the
// sub-message field tracks presence on the built message).
private void buildPartial0(
    com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    // Prefer the nested builder's output when one was created.
    result.genericMetadata_ =
        genericMetadataBuilder_ == null ? genericMetadata_ : genericMetadataBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.study_ = study_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.trial_ = trial_;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata) {
return mergeFrom(
(com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata other) {
if (other
== com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
.getDefaultInstance()) return this;
if (other.hasGenericMetadata()) {
mergeGenericMetadata(other.getGenericMetadata());
}
if (!other.getStudy().isEmpty()) {
study_ = other.study_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getTrial().isEmpty()) {
trial_ = other.trial_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streaming parse: each wire tag is (field_number << 3) | wire_type, so
// 10/18/26 are fields 1/2/3 with wire type 2 (length-delimited).
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 signals end of input.
          done = true;
          break;
        case 10:
          {
            // Field 1: generic_metadata (message).
            input.readMessage(getGenericMetadataFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            // Field 2: study (string, must be valid UTF-8).
            study_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 26:
          {
            // Field 3: trial (string, must be valid UTF-8).
            trial_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            // Unrecognized fields are preserved as unknown fields.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata,
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder,
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder>
genericMetadataBuilder_;
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return Whether the genericMetadata field is set.
*/
public boolean hasGenericMetadata() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return The genericMetadata.
*/
public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() {
if (genericMetadataBuilder_ == null) {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
} else {
return genericMetadataBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public Builder setGenericMetadata(
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata value) {
if (genericMetadataBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
genericMetadata_ = value;
} else {
genericMetadataBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public Builder setGenericMetadata(
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder builderForValue) {
if (genericMetadataBuilder_ == null) {
genericMetadata_ = builderForValue.build();
} else {
genericMetadataBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public Builder mergeGenericMetadata(
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata value) {
if (genericMetadataBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& genericMetadata_ != null
&& genericMetadata_
!= com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata
.getDefaultInstance()) {
getGenericMetadataBuilder().mergeFrom(value);
} else {
genericMetadata_ = value;
}
} else {
genericMetadataBuilder_.mergeFrom(value);
}
if (genericMetadata_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public Builder clearGenericMetadata() {
bitField0_ = (bitField0_ & ~0x00000001);
genericMetadata_ = null;
if (genericMetadataBuilder_ != null) {
genericMetadataBuilder_.dispose();
genericMetadataBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder
getGenericMetadataBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getGenericMetadataFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder
getGenericMetadataOrBuilder() {
if (genericMetadataBuilder_ != null) {
return genericMetadataBuilder_.getMessageOrBuilder();
} else {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
}
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata,
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder,
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder>
getGenericMetadataFieldBuilder() {
if (genericMetadataBuilder_ == null) {
genericMetadataBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata,
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder,
com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder>(
getGenericMetadata(), getParentForChildren(), isClean());
genericMetadata_ = null;
}
return genericMetadataBuilder_;
}
private java.lang.Object study_ = "";
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @return The study.
*/
public java.lang.String getStudy() {
java.lang.Object ref = study_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
study_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @return The bytes for study.
*/
public com.google.protobuf.ByteString getStudyBytes() {
java.lang.Object ref = study_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
study_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @param value The study to set.
* @return This builder for chaining.
*/
public Builder setStudy(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
study_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearStudy() {
study_ = getDefaultInstance().getStudy();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @param value The bytes for study to set.
* @return This builder for chaining.
*/
public Builder setStudyBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
study_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object trial_ = "";
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @return The trial.
*/
public java.lang.String getTrial() {
java.lang.Object ref = trial_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
trial_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @return The bytes for trial.
*/
public com.google.protobuf.ByteString getTrialBytes() {
java.lang.Object ref = trial_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
trial_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @param value The trial to set.
* @return This builder for chaining.
*/
public Builder setTrial(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
trial_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearTrial() {
trial_ = getDefaultInstance().getTrial();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @param value The bytes for trial to set.
* @return This builder for chaining.
*/
public Builder setTrialBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
trial_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata)
private static final com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata();
}
public static com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CheckTrialEarlyStoppingStateMetatdata> PARSER =
new com.google.protobuf.AbstractParser<CheckTrialEarlyStoppingStateMetatdata>() {
@java.lang.Override
public CheckTrialEarlyStoppingStateMetatdata parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CheckTrialEarlyStoppingStateMetatdata> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CheckTrialEarlyStoppingStateMetatdata> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CheckTrialEarlyStoppingStateMetatdata
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,217 | java-privilegedaccessmanager/proto-google-cloud-privilegedaccessmanager-v1/src/main/java/com/google/cloud/privilegedaccessmanager/v1/SearchGrantsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/privilegedaccessmanager/v1/privilegedaccessmanager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.privilegedaccessmanager.v1;
/**
*
*
* <pre>
* Response message for `SearchGrants` method.
* </pre>
*
* Protobuf type {@code google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse}
*/
public final class SearchGrantsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse)
SearchGrantsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchGrantsResponse.newBuilder() to construct.
private SearchGrantsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchGrantsResponse() {
grants_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchGrantsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
.internal_static_google_cloud_privilegedaccessmanager_v1_SearchGrantsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
.internal_static_google_cloud_privilegedaccessmanager_v1_SearchGrantsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse.class,
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse.Builder.class);
}
public static final int GRANTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.privilegedaccessmanager.v1.Grant> grants_;
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.privilegedaccessmanager.v1.Grant> getGrantsList() {
return grants_;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.privilegedaccessmanager.v1.GrantOrBuilder>
getGrantsOrBuilderList() {
return grants_;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
@java.lang.Override
public int getGrantsCount() {
return grants_.size();
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.Grant getGrants(int index) {
return grants_.get(index);
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.GrantOrBuilder getGrantsOrBuilder(int index) {
return grants_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < grants_.size(); i++) {
output.writeMessage(1, grants_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < grants_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, grants_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse)) {
return super.equals(obj);
}
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse other =
(com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse) obj;
if (!getGrantsList().equals(other.getGrantsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getGrantsCount() > 0) {
hash = (37 * hash) + GRANTS_FIELD_NUMBER;
hash = (53 * hash) + getGrantsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for `SearchGrants` method.
* </pre>
*
* Protobuf type {@code google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse)
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
.internal_static_google_cloud_privilegedaccessmanager_v1_SearchGrantsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
.internal_static_google_cloud_privilegedaccessmanager_v1_SearchGrantsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse.class,
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse.Builder.class);
}
// Construct using com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (grantsBuilder_ == null) {
grants_ = java.util.Collections.emptyList();
} else {
grants_ = null;
grantsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.privilegedaccessmanager.v1.PrivilegedAccessManagerProto
.internal_static_google_cloud_privilegedaccessmanager_v1_SearchGrantsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse
getDefaultInstanceForType() {
return com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse build() {
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse buildPartial() {
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse result =
new com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse result) {
if (grantsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
grants_ = java.util.Collections.unmodifiableList(grants_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.grants_ = grants_;
} else {
result.grants_ = grantsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse) {
return mergeFrom((com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse other) {
if (other
== com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse.getDefaultInstance())
return this;
if (grantsBuilder_ == null) {
if (!other.grants_.isEmpty()) {
if (grants_.isEmpty()) {
grants_ = other.grants_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureGrantsIsMutable();
grants_.addAll(other.grants_);
}
onChanged();
}
} else {
if (!other.grants_.isEmpty()) {
if (grantsBuilder_.isEmpty()) {
grantsBuilder_.dispose();
grantsBuilder_ = null;
grants_ = other.grants_;
bitField0_ = (bitField0_ & ~0x00000001);
grantsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getGrantsFieldBuilder()
: null;
} else {
grantsBuilder_.addAllMessages(other.grants_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.privilegedaccessmanager.v1.Grant m =
input.readMessage(
com.google.cloud.privilegedaccessmanager.v1.Grant.parser(),
extensionRegistry);
if (grantsBuilder_ == null) {
ensureGrantsIsMutable();
grants_.add(m);
} else {
grantsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.privilegedaccessmanager.v1.Grant> grants_ =
java.util.Collections.emptyList();
private void ensureGrantsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
grants_ =
new java.util.ArrayList<com.google.cloud.privilegedaccessmanager.v1.Grant>(grants_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.privilegedaccessmanager.v1.Grant,
com.google.cloud.privilegedaccessmanager.v1.Grant.Builder,
com.google.cloud.privilegedaccessmanager.v1.GrantOrBuilder>
grantsBuilder_;
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public java.util.List<com.google.cloud.privilegedaccessmanager.v1.Grant> getGrantsList() {
if (grantsBuilder_ == null) {
return java.util.Collections.unmodifiableList(grants_);
} else {
return grantsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public int getGrantsCount() {
if (grantsBuilder_ == null) {
return grants_.size();
} else {
return grantsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public com.google.cloud.privilegedaccessmanager.v1.Grant getGrants(int index) {
if (grantsBuilder_ == null) {
return grants_.get(index);
} else {
return grantsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder setGrants(int index, com.google.cloud.privilegedaccessmanager.v1.Grant value) {
if (grantsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGrantsIsMutable();
grants_.set(index, value);
onChanged();
} else {
grantsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder setGrants(
int index, com.google.cloud.privilegedaccessmanager.v1.Grant.Builder builderForValue) {
if (grantsBuilder_ == null) {
ensureGrantsIsMutable();
grants_.set(index, builderForValue.build());
onChanged();
} else {
grantsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder addGrants(com.google.cloud.privilegedaccessmanager.v1.Grant value) {
if (grantsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGrantsIsMutable();
grants_.add(value);
onChanged();
} else {
grantsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder addGrants(int index, com.google.cloud.privilegedaccessmanager.v1.Grant value) {
if (grantsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGrantsIsMutable();
grants_.add(index, value);
onChanged();
} else {
grantsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder addGrants(
com.google.cloud.privilegedaccessmanager.v1.Grant.Builder builderForValue) {
if (grantsBuilder_ == null) {
ensureGrantsIsMutable();
grants_.add(builderForValue.build());
onChanged();
} else {
grantsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder addGrants(
int index, com.google.cloud.privilegedaccessmanager.v1.Grant.Builder builderForValue) {
if (grantsBuilder_ == null) {
ensureGrantsIsMutable();
grants_.add(index, builderForValue.build());
onChanged();
} else {
grantsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder addAllGrants(
java.lang.Iterable<? extends com.google.cloud.privilegedaccessmanager.v1.Grant> values) {
if (grantsBuilder_ == null) {
ensureGrantsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, grants_);
onChanged();
} else {
grantsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder clearGrants() {
if (grantsBuilder_ == null) {
grants_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
grantsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public Builder removeGrants(int index) {
if (grantsBuilder_ == null) {
ensureGrantsIsMutable();
grants_.remove(index);
onChanged();
} else {
grantsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public com.google.cloud.privilegedaccessmanager.v1.Grant.Builder getGrantsBuilder(int index) {
return getGrantsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public com.google.cloud.privilegedaccessmanager.v1.GrantOrBuilder getGrantsOrBuilder(
int index) {
if (grantsBuilder_ == null) {
return grants_.get(index);
} else {
return grantsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public java.util.List<? extends com.google.cloud.privilegedaccessmanager.v1.GrantOrBuilder>
getGrantsOrBuilderList() {
if (grantsBuilder_ != null) {
return grantsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(grants_);
}
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public com.google.cloud.privilegedaccessmanager.v1.Grant.Builder addGrantsBuilder() {
return getGrantsFieldBuilder()
.addBuilder(com.google.cloud.privilegedaccessmanager.v1.Grant.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public com.google.cloud.privilegedaccessmanager.v1.Grant.Builder addGrantsBuilder(int index) {
return getGrantsFieldBuilder()
.addBuilder(
index, com.google.cloud.privilegedaccessmanager.v1.Grant.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of grants.
* </pre>
*
* <code>repeated .google.cloud.privilegedaccessmanager.v1.Grant grants = 1;</code>
*/
public java.util.List<com.google.cloud.privilegedaccessmanager.v1.Grant.Builder>
getGrantsBuilderList() {
return getGrantsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.privilegedaccessmanager.v1.Grant,
com.google.cloud.privilegedaccessmanager.v1.Grant.Builder,
com.google.cloud.privilegedaccessmanager.v1.GrantOrBuilder>
getGrantsFieldBuilder() {
if (grantsBuilder_ == null) {
grantsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.privilegedaccessmanager.v1.Grant,
com.google.cloud.privilegedaccessmanager.v1.Grant.Builder,
com.google.cloud.privilegedaccessmanager.v1.GrantOrBuilder>(
grants_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
grants_ = null;
}
return grantsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse)
private static final com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse();
}
public static com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SearchGrantsResponse> PARSER =
new com.google.protobuf.AbstractParser<SearchGrantsResponse>() {
@java.lang.Override
public SearchGrantsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
  // Static accessor for the singleton parser.
  public static com.google.protobuf.Parser<SearchGrantsResponse> parser() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; same singleton parser.
  @java.lang.Override
  public com.google.protobuf.Parser<SearchGrantsResponse> getParserForType() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; same singleton default.
  @java.lang.Override
  public com.google.cloud.privilegedaccessmanager.v1.SearchGrantsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,247 | java-analytics-admin/proto-google-analytics-admin-v1beta/src/main/java/com/google/analytics/admin/v1beta/ListKeyEventsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1beta/analytics_admin.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1beta;
/**
*
*
* <pre>
* Response message for ListKeyEvents RPC.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1beta.ListKeyEventsResponse}
*/
public final class ListKeyEventsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.ListKeyEventsResponse)
    ListKeyEventsResponseOrBuilder {
  // NOTE(review): machine-generated by protoc (file header says "DO NOT EDIT!").
  // Do not hand-modify; regenerate from
  // google/analytics/admin/v1beta/analytics_admin.proto instead.
  private static final long serialVersionUID = 0L;

  // Use ListKeyEventsResponse.newBuilder() to construct.
  private ListKeyEventsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ListKeyEventsResponse() {
    keyEvents_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListKeyEventsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1beta.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1beta_ListKeyEventsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.admin.v1beta.AnalyticsAdminProto
        .internal_static_google_analytics_admin_v1beta_ListKeyEventsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1beta.ListKeyEventsResponse.class,
            com.google.analytics.admin.v1beta.ListKeyEventsResponse.Builder.class);
  }

  public static final int KEY_EVENTS_FIELD_NUMBER = 1;

  // Backing list for the repeated key_events field; made unmodifiable when
  // the message is built (see Builder.buildPartialRepeatedFields).
  @SuppressWarnings("serial")
  private java.util.List<com.google.analytics.admin.v1beta.KeyEvent> keyEvents_;

  /**
   *
   *
   * <pre>
   * The requested Key Events
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.analytics.admin.v1beta.KeyEvent> getKeyEventsList() {
    return keyEvents_;
  }

  /**
   *
   *
   * <pre>
   * The requested Key Events
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.analytics.admin.v1beta.KeyEventOrBuilder>
      getKeyEventsOrBuilderList() {
    return keyEvents_;
  }

  /**
   *
   *
   * <pre>
   * The requested Key Events
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
   */
  @java.lang.Override
  public int getKeyEventsCount() {
    return keyEvents_.size();
  }

  /**
   *
   *
   * <pre>
   * The requested Key Events
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1beta.KeyEvent getKeyEvents(int index) {
    return keyEvents_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The requested Key Events
   * </pre>
   *
   * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
   */
  @java.lang.Override
  public com.google.analytics.admin.v1beta.KeyEventOrBuilder getKeyEventsOrBuilder(int index) {
    return keyEvents_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Lazily holds either a java.lang.String or a com.google.protobuf.ByteString;
  // the getters convert between the two representations and cache the result
  // in place (standard protobuf lazy string caching).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent calls skip the UTF-8 decode.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString for subsequent byte-oriented access.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Tri-state cache for isInitialized(): -1 = not yet computed,
  // 0 = known not initialized, 1 = known initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < keyEvents_.size(); i++) {
      output.writeMessage(1, keyEvents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is memoized; -1 (inherited default) means "not computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < keyEvents_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, keyEvents_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.analytics.admin.v1beta.ListKeyEventsResponse)) {
      return super.equals(obj);
    }
    com.google.analytics.admin.v1beta.ListKeyEventsResponse other =
        (com.google.analytics.admin.v1beta.ListKeyEventsResponse) obj;

    if (!getKeyEventsList().equals(other.getKeyEventsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 (inherited default) means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getKeyEventsCount() > 0) {
      hash = (37 * hash) + KEY_EVENTS_FIELD_NUMBER;
      hash = (53 * hash) + getKeyEventsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.analytics.admin.v1beta.ListKeyEventsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Response message for ListKeyEvents RPC.
   * </pre>
   *
   * Protobuf type {@code google.analytics.admin.v1beta.ListKeyEventsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.ListKeyEventsResponse)
      com.google.analytics.admin.v1beta.ListKeyEventsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.analytics.admin.v1beta.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1beta_ListKeyEventsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.analytics.admin.v1beta.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1beta_ListKeyEventsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.analytics.admin.v1beta.ListKeyEventsResponse.class,
              com.google.analytics.admin.v1beta.ListKeyEventsResponse.Builder.class);
    }

    // Construct using com.google.analytics.admin.v1beta.ListKeyEventsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (keyEventsBuilder_ == null) {
        keyEvents_ = java.util.Collections.emptyList();
      } else {
        keyEvents_ = null;
        keyEventsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.analytics.admin.v1beta.AnalyticsAdminProto
          .internal_static_google_analytics_admin_v1beta_ListKeyEventsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1beta.ListKeyEventsResponse getDefaultInstanceForType() {
      return com.google.analytics.admin.v1beta.ListKeyEventsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.analytics.admin.v1beta.ListKeyEventsResponse build() {
      com.google.analytics.admin.v1beta.ListKeyEventsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.analytics.admin.v1beta.ListKeyEventsResponse buildPartial() {
      com.google.analytics.admin.v1beta.ListKeyEventsResponse result =
          new com.google.analytics.admin.v1beta.ListKeyEventsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.analytics.admin.v1beta.ListKeyEventsResponse result) {
      if (keyEventsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // Freeze the builder-owned list and hand ownership to the message.
          keyEvents_ = java.util.Collections.unmodifiableList(keyEvents_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.keyEvents_ = keyEvents_;
      } else {
        result.keyEvents_ = keyEventsBuilder_.build();
      }
    }

    private void buildPartial0(com.google.analytics.admin.v1beta.ListKeyEventsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.analytics.admin.v1beta.ListKeyEventsResponse) {
        return mergeFrom((com.google.analytics.admin.v1beta.ListKeyEventsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.analytics.admin.v1beta.ListKeyEventsResponse other) {
      if (other == com.google.analytics.admin.v1beta.ListKeyEventsResponse.getDefaultInstance())
        return this;
      if (keyEventsBuilder_ == null) {
        if (!other.keyEvents_.isEmpty()) {
          if (keyEvents_.isEmpty()) {
            // Share the other message's (immutable) list; ownership bit cleared.
            keyEvents_ = other.keyEvents_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureKeyEventsIsMutable();
            keyEvents_.addAll(other.keyEvents_);
          }
          onChanged();
        }
      } else {
        if (!other.keyEvents_.isEmpty()) {
          if (keyEventsBuilder_.isEmpty()) {
            keyEventsBuilder_.dispose();
            keyEventsBuilder_ = null;
            keyEvents_ = other.keyEvents_;
            bitField0_ = (bitField0_ & ~0x00000001);
            keyEventsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getKeyEventsFieldBuilder()
                    : null;
          } else {
            keyEventsBuilder_.addAllMessages(other.keyEvents_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Field 1 (key_events), wire type 2: length-delimited message.
                com.google.analytics.admin.v1beta.KeyEvent m =
                    input.readMessage(
                        com.google.analytics.admin.v1beta.KeyEvent.parser(), extensionRegistry);
                if (keyEventsBuilder_ == null) {
                  ensureKeyEventsIsMutable();
                  keyEvents_.add(m);
                } else {
                  keyEventsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Field 2 (next_page_token), wire type 2: UTF-8 string.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001: keyEvents_ list is builder-owned (mutable).
    // Bit 0x00000002: nextPageToken_ has been explicitly set.
    private int bitField0_;

    private java.util.List<com.google.analytics.admin.v1beta.KeyEvent> keyEvents_ =
        java.util.Collections.emptyList();

    // Copy-on-write: take ownership of the list before the first mutation.
    private void ensureKeyEventsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        keyEvents_ =
            new java.util.ArrayList<com.google.analytics.admin.v1beta.KeyEvent>(keyEvents_);
        bitField0_ |= 0x00000001;
      }
    }

    // Nested-builder support; once created, keyEvents_ is managed through it
    // and the plain list field is set to null.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1beta.KeyEvent,
            com.google.analytics.admin.v1beta.KeyEvent.Builder,
            com.google.analytics.admin.v1beta.KeyEventOrBuilder>
        keyEventsBuilder_;

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public java.util.List<com.google.analytics.admin.v1beta.KeyEvent> getKeyEventsList() {
      if (keyEventsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(keyEvents_);
      } else {
        return keyEventsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public int getKeyEventsCount() {
      if (keyEventsBuilder_ == null) {
        return keyEvents_.size();
      } else {
        return keyEventsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public com.google.analytics.admin.v1beta.KeyEvent getKeyEvents(int index) {
      if (keyEventsBuilder_ == null) {
        return keyEvents_.get(index);
      } else {
        return keyEventsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder setKeyEvents(int index, com.google.analytics.admin.v1beta.KeyEvent value) {
      if (keyEventsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureKeyEventsIsMutable();
        keyEvents_.set(index, value);
        onChanged();
      } else {
        keyEventsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder setKeyEvents(
        int index, com.google.analytics.admin.v1beta.KeyEvent.Builder builderForValue) {
      if (keyEventsBuilder_ == null) {
        ensureKeyEventsIsMutable();
        keyEvents_.set(index, builderForValue.build());
        onChanged();
      } else {
        keyEventsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder addKeyEvents(com.google.analytics.admin.v1beta.KeyEvent value) {
      if (keyEventsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureKeyEventsIsMutable();
        keyEvents_.add(value);
        onChanged();
      } else {
        keyEventsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder addKeyEvents(int index, com.google.analytics.admin.v1beta.KeyEvent value) {
      if (keyEventsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureKeyEventsIsMutable();
        keyEvents_.add(index, value);
        onChanged();
      } else {
        keyEventsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder addKeyEvents(
        com.google.analytics.admin.v1beta.KeyEvent.Builder builderForValue) {
      if (keyEventsBuilder_ == null) {
        ensureKeyEventsIsMutable();
        keyEvents_.add(builderForValue.build());
        onChanged();
      } else {
        keyEventsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder addKeyEvents(
        int index, com.google.analytics.admin.v1beta.KeyEvent.Builder builderForValue) {
      if (keyEventsBuilder_ == null) {
        ensureKeyEventsIsMutable();
        keyEvents_.add(index, builderForValue.build());
        onChanged();
      } else {
        keyEventsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder addAllKeyEvents(
        java.lang.Iterable<? extends com.google.analytics.admin.v1beta.KeyEvent> values) {
      if (keyEventsBuilder_ == null) {
        ensureKeyEventsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, keyEvents_);
        onChanged();
      } else {
        keyEventsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder clearKeyEvents() {
      if (keyEventsBuilder_ == null) {
        keyEvents_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        keyEventsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public Builder removeKeyEvents(int index) {
      if (keyEventsBuilder_ == null) {
        ensureKeyEventsIsMutable();
        keyEvents_.remove(index);
        onChanged();
      } else {
        keyEventsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public com.google.analytics.admin.v1beta.KeyEvent.Builder getKeyEventsBuilder(int index) {
      return getKeyEventsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public com.google.analytics.admin.v1beta.KeyEventOrBuilder getKeyEventsOrBuilder(int index) {
      if (keyEventsBuilder_ == null) {
        return keyEvents_.get(index);
      } else {
        return keyEventsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public java.util.List<? extends com.google.analytics.admin.v1beta.KeyEventOrBuilder>
        getKeyEventsOrBuilderList() {
      if (keyEventsBuilder_ != null) {
        return keyEventsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(keyEvents_);
      }
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public com.google.analytics.admin.v1beta.KeyEvent.Builder addKeyEventsBuilder() {
      return getKeyEventsFieldBuilder()
          .addBuilder(com.google.analytics.admin.v1beta.KeyEvent.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public com.google.analytics.admin.v1beta.KeyEvent.Builder addKeyEventsBuilder(int index) {
      return getKeyEventsFieldBuilder()
          .addBuilder(index, com.google.analytics.admin.v1beta.KeyEvent.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The requested Key Events
     * </pre>
     *
     * <code>repeated .google.analytics.admin.v1beta.KeyEvent key_events = 1;</code>
     */
    public java.util.List<com.google.analytics.admin.v1beta.KeyEvent.Builder>
        getKeyEventsBuilderList() {
      return getKeyEventsFieldBuilder().getBuilderList();
    }

    // Lazily creates the repeated-field builder and transfers list ownership
    // to it (plain list field becomes null afterwards).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.analytics.admin.v1beta.KeyEvent,
            com.google.analytics.admin.v1beta.KeyEvent.Builder,
            com.google.analytics.admin.v1beta.KeyEventOrBuilder>
        getKeyEventsFieldBuilder() {
      if (keyEventsBuilder_ == null) {
        keyEventsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.analytics.admin.v1beta.KeyEvent,
                com.google.analytics.admin.v1beta.KeyEvent.Builder,
                com.google.analytics.admin.v1beta.KeyEventOrBuilder>(
                keyEvents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        keyEvents_ = null;
      }
      return keyEventsBuilder_;
    }

    // Same lazy String/ByteString dual representation as the message field.
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject non-UTF-8 bytes up front so lazy decoding can never fail later.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.ListKeyEventsResponse)
  }

  // @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.ListKeyEventsResponse)
  // Singleton default instance; eagerly initialized once the class loads.
  private static final com.google.analytics.admin.v1beta.ListKeyEventsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.ListKeyEventsResponse();
  }

  public static com.google.analytics.admin.v1beta.ListKeyEventsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; failures are wrapped as InvalidProtocolBufferException
  // carrying the partially-built message.
  private static final com.google.protobuf.Parser<ListKeyEventsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListKeyEventsResponse>() {
        @java.lang.Override
        public ListKeyEventsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListKeyEventsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListKeyEventsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.analytics.admin.v1beta.ListKeyEventsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/iceberg | 36,246 | core/src/test/java/org/apache/iceberg/jdbc/TestJdbcTableConcurrency.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.jdbc;
import static org.apache.iceberg.TableProperties.COMMIT_MAX_RETRY_WAIT_MS;
import static org.apache.iceberg.TableProperties.COMMIT_MIN_RETRY_WAIT_MS;
import static org.apache.iceberg.TableProperties.COMMIT_NUM_RETRIES;
import static org.apache.iceberg.types.Types.NestedField.required;
import static org.assertj.core.api.Assertions.assertThat;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Date;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.NClob;
import java.sql.ParameterMetaData;
import java.sql.PreparedStatement;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLType;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.ShardingKey;
import java.sql.Statement;
import java.sql.Struct;
import java.sql.Time;
import java.sql.Timestamp;
import java.time.Duration;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DataFiles;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.relocated.com.google.common.util.concurrent.MoreExecutors;
import org.apache.iceberg.types.Types;
import org.apache.iceberg.util.Tasks;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
public class TestJdbcTableConcurrency {
  // Table identifier shared by every test; each test backs it with its own database file.
  static final TableIdentifier TABLE_IDENTIFIER = TableIdentifier.of("db", "test_table");
  // Minimal two-column schema used for all tables created by these tests.
  static final Schema SCHEMA =
      new Schema(
          required(1, "id", Types.IntegerType.get(), "unique ID"),
          required(2, "data", Types.StringType.get()));
  // Per-test temporary directory; serves as both the warehouse location and the
  // parent directory of the SQLite database files.
  @TempDir private File tableDir;
@Test
public synchronized void testConcurrentFastAppends() throws IOException {
Map<String, String> properties = Maps.newHashMap();
properties.put(CatalogProperties.WAREHOUSE_LOCATION, tableDir.getAbsolutePath());
String sqliteDb = "jdbc:sqlite:" + tableDir.getAbsolutePath() + "concurentFastAppend.db";
properties.put(CatalogProperties.URI, sqliteDb);
JdbcCatalog catalog = new JdbcCatalog();
catalog.setConf(new Configuration());
catalog.initialize("jdbc", properties);
catalog.createTable(TABLE_IDENTIFIER, SCHEMA);
Table icebergTable = catalog.loadTable(TABLE_IDENTIFIER);
String fileName = UUID.randomUUID().toString();
DataFile file =
DataFiles.builder(icebergTable.spec())
.withPath(FileFormat.PARQUET.addExtension(fileName))
.withRecordCount(2)
.withFileSizeInBytes(0)
.build();
ExecutorService executorService =
MoreExecutors.getExitingExecutorService(
(ThreadPoolExecutor) Executors.newFixedThreadPool(2));
AtomicInteger barrier = new AtomicInteger(0);
int threadsCount = 2;
Tasks.range(threadsCount)
.stopOnFailure()
.throwFailureWhenFinished()
.executeWith(executorService)
.run(
index -> {
for (int numCommittedFiles = 0; numCommittedFiles < 10; numCommittedFiles++) {
final int currentFilesCount = numCommittedFiles;
Awaitility.await()
.pollInterval(Duration.ofMillis(10))
.atMost(Duration.ofSeconds(10))
.until(() -> barrier.get() >= currentFilesCount * threadsCount);
icebergTable.newFastAppend().appendFile(file).commit();
barrier.incrementAndGet();
}
});
icebergTable.refresh();
assertThat(icebergTable.currentSnapshot().allManifests(icebergTable.io())).hasSize(20);
}
@Test
public synchronized void testConcurrentConnections() throws InterruptedException, IOException {
Map<String, String> properties = Maps.newHashMap();
properties.put(CatalogProperties.WAREHOUSE_LOCATION, tableDir.getAbsolutePath());
String sqliteDb = "jdbc:sqlite:" + tableDir.getAbsolutePath() + "concurentConnections.db";
properties.put(CatalogProperties.URI, sqliteDb);
JdbcCatalog catalog = new JdbcCatalog();
catalog.setConf(new Configuration());
catalog.initialize("jdbc", properties);
catalog.createTable(TABLE_IDENTIFIER, SCHEMA);
Table icebergTable = catalog.loadTable(TABLE_IDENTIFIER);
icebergTable
.updateProperties()
.set(COMMIT_NUM_RETRIES, "20")
.set(COMMIT_MIN_RETRY_WAIT_MS, "25")
.set(COMMIT_MAX_RETRY_WAIT_MS, "25")
.commit();
String fileName = UUID.randomUUID().toString();
DataFile file =
DataFiles.builder(icebergTable.spec())
.withPath(FileFormat.PARQUET.addExtension(fileName))
.withRecordCount(2)
.withFileSizeInBytes(0)
.build();
ExecutorService executorService =
MoreExecutors.getExitingExecutorService(
(ThreadPoolExecutor) Executors.newFixedThreadPool(7));
for (int i = 0; i < 7; i++) {
executorService.submit(() -> icebergTable.newAppend().appendFile(file).commit());
}
executorService.shutdown();
assertThat(executorService.awaitTermination(3, TimeUnit.MINUTES)).as("Timeout").isTrue();
assertThat(Iterables.size(icebergTable.snapshots())).isEqualTo(7);
}
@Test
public synchronized void testInitializeWithSlowConcurrentConnections()
throws InterruptedException, SQLException, ExecutionException, ClassNotFoundException {
// number of threads and requests to attempt.
int parallelism = 2;
// verifies that multiple calls to initialize with slow responses will not fail.
Map<String, String> properties = Maps.newHashMap();
properties.put(CatalogProperties.WAREHOUSE_LOCATION, tableDir.getAbsolutePath());
String testingDB = "jdbc:slow:derby:memory:testDb;create=true";
new org.apache.derby.jdbc.EmbeddedDriver();
properties.put(CatalogProperties.URI, testingDB);
SlowDriver slowDriver = new SlowDriver(testingDB);
Callable<JdbcCatalog> makeCatalog =
() -> {
JdbcCatalog catalog = new JdbcCatalog();
catalog.setConf(new Configuration());
catalog.initialize("jdbc", properties);
return catalog;
};
try {
DriverManager.registerDriver(slowDriver);
ExecutorService executorService =
MoreExecutors.getExitingExecutorService(
(ThreadPoolExecutor) Executors.newFixedThreadPool(parallelism));
List<Future<JdbcCatalog>> futures = Lists.newArrayList();
for (int i = 0; i < parallelism; i++) {
futures.add(executorService.submit(makeCatalog));
}
for (Future<JdbcCatalog> future : futures) {
future.get();
}
} finally {
DriverManager.deregisterDriver(slowDriver);
}
}
/** A Connection implementation that returns SlowPreparedStatements */
private static class SlowJDBCConnection implements Connection {
Connection delegate;
SlowJDBCConnection(Connection delegate) {
this.delegate = delegate;
}
@Override
public Statement createStatement() throws SQLException {
return delegate.createStatement();
}
@Override
public PreparedStatement prepareStatement(String sql) throws SQLException {
return new SlowPreparedStatement(delegate.prepareStatement(sql));
}
@Override
public CallableStatement prepareCall(String sql) throws SQLException {
return delegate.prepareCall(sql);
}
@Override
public String nativeSQL(String sql) throws SQLException {
return delegate.nativeSQL(sql);
}
@Override
public void setAutoCommit(boolean autoCommit) throws SQLException {
delegate.setAutoCommit(autoCommit);
}
@Override
public boolean getAutoCommit() throws SQLException {
return delegate.getAutoCommit();
}
@Override
public void commit() throws SQLException {
delegate.commit();
}
@Override
public void rollback() throws SQLException {
delegate.rollback();
}
@Override
public void close() throws SQLException {
delegate.close();
}
@Override
public boolean isClosed() throws SQLException {
return delegate.isClosed();
}
@Override
public DatabaseMetaData getMetaData() throws SQLException {
return delegate.getMetaData();
}
@Override
public void setReadOnly(boolean readOnly) throws SQLException {
delegate.setReadOnly(readOnly);
}
@Override
public boolean isReadOnly() throws SQLException {
return delegate.isReadOnly();
}
@Override
public void setCatalog(String catalog) throws SQLException {
delegate.setCatalog(catalog);
}
@Override
public String getCatalog() throws SQLException {
return delegate.getCatalog();
}
@Override
public void setTransactionIsolation(int level) throws SQLException {
delegate.setTransactionIsolation(level);
}
@Override
public int getTransactionIsolation() throws SQLException {
return delegate.getTransactionIsolation();
}
@Override
public SQLWarning getWarnings() throws SQLException {
return delegate.getWarnings();
}
@Override
public void clearWarnings() throws SQLException {
delegate.clearWarnings();
}
@Override
public Statement createStatement(int resultSetType, int resultSetConcurrency)
throws SQLException {
return delegate.createStatement(resultSetType, resultSetConcurrency);
}
@Override
public PreparedStatement prepareStatement(
String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
return delegate.prepareStatement(sql, resultSetType, resultSetConcurrency);
}
@Override
public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency)
throws SQLException {
return delegate.prepareCall(sql, resultSetType, resultSetConcurrency);
}
@Override
public Map<String, Class<?>> getTypeMap() throws SQLException {
return delegate.getTypeMap();
}
@Override
public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
delegate.setTypeMap(map);
}
@Override
public void setHoldability(int holdability) throws SQLException {
delegate.setHoldability(holdability);
}
@Override
public int getHoldability() throws SQLException {
return delegate.getHoldability();
}
@Override
public Savepoint setSavepoint() throws SQLException {
return delegate.setSavepoint();
}
@Override
public Savepoint setSavepoint(String name) throws SQLException {
return delegate.setSavepoint(name);
}
@Override
public void rollback(Savepoint savepoint) throws SQLException {
delegate.rollback(savepoint);
}
@Override
public void releaseSavepoint(Savepoint savepoint) throws SQLException {
delegate.releaseSavepoint(savepoint);
}
@Override
public Statement createStatement(
int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
return delegate.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability);
}
@Override
public PreparedStatement prepareStatement(
String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
throws SQLException {
return delegate.prepareStatement(
sql, resultSetType, resultSetConcurrency, resultSetHoldability);
}
@Override
public CallableStatement prepareCall(
String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
throws SQLException {
return delegate.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability);
}
@Override
public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys)
throws SQLException {
return delegate.prepareStatement(sql, autoGeneratedKeys);
}
@Override
public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
return delegate.prepareStatement(sql, columnIndexes);
}
@Override
public PreparedStatement prepareStatement(String sql, String[] columnNames)
throws SQLException {
return delegate.prepareStatement(sql, columnNames);
}
@Override
public Clob createClob() throws SQLException {
return delegate.createClob();
}
@Override
public Blob createBlob() throws SQLException {
return delegate.createBlob();
}
@Override
public NClob createNClob() throws SQLException {
return delegate.createNClob();
}
@Override
public SQLXML createSQLXML() throws SQLException {
return delegate.createSQLXML();
}
@Override
public boolean isValid(int timeout) throws SQLException {
return delegate.isValid(timeout);
}
@Override
public void setClientInfo(String name, String value) throws SQLClientInfoException {
delegate.setClientInfo(name, value);
}
@Override
public void setClientInfo(Properties properties) throws SQLClientInfoException {
delegate.setClientInfo(properties);
}
@Override
public String getClientInfo(String name) throws SQLException {
return delegate.getClientInfo(name);
}
@Override
public Properties getClientInfo() throws SQLException {
return delegate.getClientInfo();
}
@Override
public Array createArrayOf(String typeName, Object[] elements) throws SQLException {
return delegate.createArrayOf(typeName, elements);
}
@Override
public Struct createStruct(String typeName, Object[] attributes) throws SQLException {
return delegate.createStruct(typeName, attributes);
}
@Override
public void setSchema(String schema) throws SQLException {
delegate.setSchema(schema);
}
@Override
public String getSchema() throws SQLException {
return delegate.getSchema();
}
@Override
public void abort(Executor executor) throws SQLException {
delegate.abort(executor);
}
@Override
public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
delegate.setNetworkTimeout(executor, milliseconds);
}
@Override
public int getNetworkTimeout() throws SQLException {
return delegate.getNetworkTimeout();
}
@Override
public void beginRequest() throws SQLException {
delegate.beginRequest();
}
@Override
public void endRequest() throws SQLException {
delegate.endRequest();
}
@Override
public boolean setShardingKeyIfValid(
ShardingKey shardingKey, ShardingKey superShardingKey, int timeout) throws SQLException {
return delegate.setShardingKeyIfValid(shardingKey, superShardingKey, timeout);
}
@Override
public boolean setShardingKeyIfValid(ShardingKey shardingKey, int timeout) throws SQLException {
return delegate.setShardingKeyIfValid(shardingKey, timeout);
}
@Override
public void setShardingKey(ShardingKey shardingKey, ShardingKey superShardingKey)
throws SQLException {
delegate.setShardingKey(shardingKey, superShardingKey);
}
@Override
public void setShardingKey(ShardingKey shardingKey) throws SQLException {
delegate.setShardingKey(shardingKey);
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
return delegate.unwrap(iface);
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return delegate.isWrapperFor(iface);
}
}
/** A slow prepared statement that has a 500 ms delay before evaluating the execute() method. */
private static class SlowPreparedStatement implements PreparedStatement {
private final PreparedStatement delegate;
SlowPreparedStatement(PreparedStatement delegate) {
this.delegate = delegate;
}
@Override
public ResultSet executeQuery() throws SQLException {
return delegate.executeQuery();
}
@Override
public int executeUpdate() throws SQLException {
return delegate.executeUpdate();
}
@Override
public void setNull(int parameterIndex, int sqlType) throws SQLException {
delegate.setNull(parameterIndex, sqlType);
}
@Override
public void setBoolean(int parameterIndex, boolean x) throws SQLException {
delegate.setBoolean(parameterIndex, x);
}
@Override
public void setByte(int parameterIndex, byte x) throws SQLException {
delegate.setByte(parameterIndex, x);
}
@Override
public void setShort(int parameterIndex, short x) throws SQLException {
delegate.setShort(parameterIndex, x);
}
@Override
public void setInt(int parameterIndex, int x) throws SQLException {
delegate.setInt(parameterIndex, x);
}
@Override
public void setLong(int parameterIndex, long x) throws SQLException {
delegate.setLong(parameterIndex, x);
}
@Override
public void setFloat(int parameterIndex, float x) throws SQLException {
delegate.setFloat(parameterIndex, x);
}
@Override
public void setDouble(int parameterIndex, double x) throws SQLException {
delegate.setDouble(parameterIndex, x);
}
@Override
public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
delegate.setBigDecimal(parameterIndex, x);
}
@Override
public void setString(int parameterIndex, String x) throws SQLException {
delegate.setString(parameterIndex, x);
}
@Override
public void setBytes(int parameterIndex, byte[] x) throws SQLException {
delegate.setBytes(parameterIndex, x);
}
@Override
public void setDate(int parameterIndex, Date x) throws SQLException {
delegate.setDate(parameterIndex, x);
}
@Override
public void setTime(int parameterIndex, Time x) throws SQLException {
delegate.setTime(parameterIndex, x);
}
@Override
public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
delegate.setTimestamp(parameterIndex, x);
}
@Override
public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
delegate.setAsciiStream(parameterIndex, x, length);
}
@Deprecated(since = "1.2")
@Override
public void setUnicodeStream(int parameterIndex, InputStream inputStream, int length)
throws SQLException {
delegate.setUnicodeStream(parameterIndex, inputStream, length);
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
delegate.setBinaryStream(parameterIndex, x, length);
}
@Override
public void clearParameters() throws SQLException {
delegate.clearParameters();
}
@Override
public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException {
delegate.setObject(parameterIndex, x, targetSqlType);
}
@Override
public void setObject(int parameterIndex, Object x) throws SQLException {
delegate.setObject(parameterIndex, x);
}
@Override
public boolean execute() throws SQLException {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
return delegate.execute();
}
@Override
public void addBatch() throws SQLException {
delegate.addBatch();
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader, int length)
throws SQLException {
delegate.setCharacterStream(parameterIndex, reader, length);
}
@Override
public void setRef(int parameterIndex, Ref x) throws SQLException {
delegate.setRef(parameterIndex, x);
}
@Override
public void setBlob(int parameterIndex, Blob x) throws SQLException {
delegate.setBlob(parameterIndex, x);
}
@Override
public void setClob(int parameterIndex, Clob x) throws SQLException {
delegate.setClob(parameterIndex, x);
}
@Override
public void setArray(int parameterIndex, Array x) throws SQLException {
delegate.setArray(parameterIndex, x);
}
@Override
public ResultSetMetaData getMetaData() throws SQLException {
return delegate.getMetaData();
}
@Override
public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
delegate.setDate(parameterIndex, x, cal);
}
@Override
public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
delegate.setTime(parameterIndex, x, cal);
}
@Override
public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
delegate.setTimestamp(parameterIndex, x, cal);
}
@Override
public void setNull(int parameterIndex, int sqlType, String typeName) throws SQLException {
delegate.setNull(parameterIndex, sqlType, typeName);
}
@Override
public void setURL(int parameterIndex, URL x) throws SQLException {
delegate.setURL(parameterIndex, x);
}
@Override
public ParameterMetaData getParameterMetaData() throws SQLException {
return delegate.getParameterMetaData();
}
@Override
public void setRowId(int parameterIndex, RowId x) throws SQLException {
delegate.setRowId(parameterIndex, x);
}
@Override
public void setNString(int parameterIndex, String value) throws SQLException {
delegate.setNString(parameterIndex, value);
}
@Override
public void setNCharacterStream(int parameterIndex, Reader value, long length)
throws SQLException {
delegate.setNCharacterStream(parameterIndex, value, length);
}
@Override
public void setNClob(int parameterIndex, NClob value) throws SQLException {
delegate.setNClob(parameterIndex, value);
}
@Override
public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
delegate.setClob(parameterIndex, reader, length);
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream, long length)
throws SQLException {
delegate.setBlob(parameterIndex, inputStream, length);
}
@Override
public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
delegate.setNClob(parameterIndex, reader, length);
}
@Override
public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
delegate.setSQLXML(parameterIndex, xmlObject);
}
@Override
public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength)
throws SQLException {
delegate.setObject(parameterIndex, x, targetSqlType, scaleOrLength);
}
@Override
public void setAsciiStream(int parameterIndex, InputStream x, long length) throws SQLException {
delegate.setAsciiStream(parameterIndex, x, length);
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x, long length)
throws SQLException {
delegate.setBinaryStream(parameterIndex, x, length);
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader, long length)
throws SQLException {
delegate.setCharacterStream(parameterIndex, reader, length);
}
@Override
public void setAsciiStream(int parameterIndex, InputStream x) throws SQLException {
delegate.setAsciiStream(parameterIndex, x);
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
delegate.setBinaryStream(parameterIndex, x);
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
delegate.setCharacterStream(parameterIndex, reader);
}
@Override
public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
delegate.setNCharacterStream(parameterIndex, value);
}
@Override
public void setClob(int parameterIndex, Reader reader) throws SQLException {
delegate.setClob(parameterIndex, reader);
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
delegate.setBlob(parameterIndex, inputStream);
}
@Override
public void setNClob(int parameterIndex, Reader reader) throws SQLException {
delegate.setNClob(parameterIndex, reader);
}
@Override
public void setObject(int parameterIndex, Object x, SQLType targetSqlType, int scaleOrLength)
throws SQLException {
delegate.setObject(parameterIndex, x, targetSqlType, scaleOrLength);
}
@Override
public void setObject(int parameterIndex, Object x, SQLType targetSqlType) throws SQLException {
delegate.setObject(parameterIndex, x, targetSqlType);
}
@Override
public long executeLargeUpdate() throws SQLException {
return delegate.executeLargeUpdate();
}
@Override
public ResultSet executeQuery(String sql) throws SQLException {
return delegate.executeQuery(sql);
}
@Override
public int executeUpdate(String sql) throws SQLException {
return delegate.executeUpdate(sql);
}
@Override
public void close() throws SQLException {
delegate.close();
}
@Override
public int getMaxFieldSize() throws SQLException {
return delegate.getMaxFieldSize();
}
@Override
public void setMaxFieldSize(int max) throws SQLException {
delegate.setMaxFieldSize(max);
}
@Override
public int getMaxRows() throws SQLException {
return delegate.getMaxRows();
}
@Override
public void setMaxRows(int max) throws SQLException {
delegate.setMaxRows(max);
}
@Override
public void setEscapeProcessing(boolean enable) throws SQLException {
delegate.setEscapeProcessing(enable);
}
@Override
public int getQueryTimeout() throws SQLException {
return delegate.getQueryTimeout();
}
@Override
public void setQueryTimeout(int seconds) throws SQLException {
delegate.setQueryTimeout(seconds);
}
@Override
public void cancel() throws SQLException {
delegate.cancel();
}
@Override
public SQLWarning getWarnings() throws SQLException {
return delegate.getWarnings();
}
@Override
public void clearWarnings() throws SQLException {
delegate.clearWarnings();
}
@Override
public void setCursorName(String name) throws SQLException {
delegate.setCursorName(name);
}
@Override
public boolean execute(String sql) throws SQLException {
return delegate.execute(sql);
}
@Override
public ResultSet getResultSet() throws SQLException {
return delegate.getResultSet();
}
@Override
public int getUpdateCount() throws SQLException {
return delegate.getUpdateCount();
}
@Override
public boolean getMoreResults() throws SQLException {
return delegate.getMoreResults();
}
@Override
public void setFetchDirection(int direction) throws SQLException {
delegate.setFetchDirection(direction);
}
@Override
public int getFetchDirection() throws SQLException {
return delegate.getFetchDirection();
}
@Override
public void setFetchSize(int rows) throws SQLException {
delegate.setFetchSize(rows);
}
@Override
public int getFetchSize() throws SQLException {
return delegate.getFetchSize();
}
@Override
public int getResultSetConcurrency() throws SQLException {
return delegate.getResultSetConcurrency();
}
@Override
public int getResultSetType() throws SQLException {
return delegate.getResultSetType();
}
@Override
public void addBatch(String sql) throws SQLException {
delegate.addBatch(sql);
}
@Override
public void clearBatch() throws SQLException {
delegate.clearBatch();
}
@Override
public int[] executeBatch() throws SQLException {
return delegate.executeBatch();
}
@Override
public Connection getConnection() throws SQLException {
return delegate.getConnection();
}
@Override
public boolean getMoreResults(int current) throws SQLException {
return delegate.getMoreResults(current);
}
@Override
public ResultSet getGeneratedKeys() throws SQLException {
return delegate.getGeneratedKeys();
}
@Override
public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
return delegate.executeUpdate(sql, autoGeneratedKeys);
}
@Override
public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
return delegate.executeUpdate(sql, columnIndexes);
}
@Override
public int executeUpdate(String sql, String[] columnNames) throws SQLException {
return delegate.executeUpdate(sql, columnNames);
}
@Override
public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
return delegate.execute(sql, autoGeneratedKeys);
}
@Override
public boolean execute(String sql, int[] columnIndexes) throws SQLException {
return delegate.execute(sql, columnIndexes);
}
@Override
public boolean execute(String sql, String[] columnNames) throws SQLException {
return delegate.execute(sql, columnNames);
}
@Override
public int getResultSetHoldability() throws SQLException {
return delegate.getResultSetHoldability();
}
@Override
public boolean isClosed() throws SQLException {
return delegate.isClosed();
}
@Override
public void setPoolable(boolean poolable) throws SQLException {
delegate.setPoolable(poolable);
}
@Override
public boolean isPoolable() throws SQLException {
return delegate.isPoolable();
}
@Override
public void closeOnCompletion() throws SQLException {
delegate.closeOnCompletion();
}
@Override
public boolean isCloseOnCompletion() throws SQLException {
return delegate.isCloseOnCompletion();
}
@Override
public long getLargeUpdateCount() throws SQLException {
return delegate.getLargeUpdateCount();
}
@Override
public void setLargeMaxRows(long max) throws SQLException {
delegate.setLargeMaxRows(max);
}
@Override
public long getLargeMaxRows() throws SQLException {
return delegate.getLargeMaxRows();
}
@Override
public long[] executeLargeBatch() throws SQLException {
return delegate.executeLargeBatch();
}
@Override
public long executeLargeUpdate(String sql) throws SQLException {
return delegate.executeLargeUpdate(sql);
}
@Override
public long executeLargeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
return delegate.executeLargeUpdate(sql, autoGeneratedKeys);
}
@Override
public long executeLargeUpdate(String sql, int[] columnIndexes) throws SQLException {
return delegate.executeLargeUpdate(sql, columnIndexes);
}
@Override
public long executeLargeUpdate(String sql, String[] columnNames) throws SQLException {
return delegate.executeLargeUpdate(sql, columnNames);
}
@Override
public String enquoteLiteral(String val) throws SQLException {
return delegate.enquoteLiteral(val);
}
@Override
public String enquoteIdentifier(String identifier, boolean alwaysQuote) throws SQLException {
return delegate.enquoteIdentifier(identifier, alwaysQuote);
}
@Override
public boolean isSimpleIdentifier(String identifier) throws SQLException {
return delegate.isSimpleIdentifier(identifier);
}
@Override
public String enquoteNCharLiteral(String val) throws SQLException {
return delegate.enquoteNCharLiteral(val);
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
return delegate.unwrap(iface);
}
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
return delegate.isWrapperFor(iface);
}
}
/**
* A driver that wraps a true driver implementation and returns SlopPreparedStatements. URL for
* this driver is "jdbc:slow:true_driver_url":
*/
private static class SlowDriver implements Driver {
private static final String PREFIX = "jdbc:slow:";
private Driver delegate;
SlowDriver(String url) throws SQLException {
if (!url.startsWith(PREFIX)) {
throw new SQLException("url must start with " + PREFIX);
}
delegate = DriverManager.getDriver(rewriteUrl(url));
}
static String rewriteUrl(String url) {
return url.startsWith(PREFIX) ? "jdbc:" + url.substring(PREFIX.length()) : url;
}
@Override
public Connection connect(String url, Properties info) throws SQLException {
return new SlowJDBCConnection(delegate.connect(rewriteUrl(url), info));
}
@Override
public boolean acceptsURL(String url) throws SQLException {
return url.startsWith(PREFIX) && delegate.acceptsURL(rewriteUrl(url));
}
@Override
public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
return delegate.getPropertyInfo(url, info);
}
@Override
public int getMajorVersion() {
return delegate.getMajorVersion();
}
@Override
public int getMinorVersion() {
return delegate.getMinorVersion();
}
@Override
public boolean jdbcCompliant() {
return delegate.jdbcCompliant();
}
@Override
public java.util.logging.Logger getParentLogger() throws SQLFeatureNotSupportedException {
return delegate.getParentLogger();
}
}
}
|
googleapis/google-api-java-client-services | 36,498 | clients/google-api-services-compute/beta/2.0.0/com/google/api/services/compute/model/Firewall.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
 * Represents a Firewall Rule resource.
 *
 * Firewall rules allow or deny ingress traffic to, and egress traffic from your instances. For more
 * information, read Firewall rules.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Firewall extends com.google.api.client.json.GenericJson {
/**
 * The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-
 * range tuple that describes a permitted connection.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<Allowed> allowed;
static {
// hack to force ProGuard to consider Allowed used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Allowed.class);
}
/**
 * [Output Only] Creation timestamp in RFC3339 text format.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
 * The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-
 * range tuple that describes a denied connection.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<Denied> denied;
static {
// hack to force ProGuard to consider Denied used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Denied.class);
}
/**
 * An optional description of this resource. Provide this field when you create the resource.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String description;
/**
 * If destination ranges are specified, the firewall rule applies only to traffic that has
 * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4
 * and IPv6 are supported.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> destinationRanges;
/**
 * Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
 * is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String direction;
/**
 * Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
 * enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
 * rule will be enabled.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean disabled;
/**
 * Deprecated in favor of enable in LogConfig. This field denotes whether to enable logging for a
 * particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Boolean enableLogging;
/**
 * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
 * [Output Only] Type of the resource. Always compute#firewall for firewall rules.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String kind;
/**
 * This field denotes the logging options for a particular firewall rule. If logging is enabled,
 * logs will be exported to Cloud Logging.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private FirewallLogConfig logConfig;
/**
 * Name of the resource; provided by the client when the resource is created. The name must be
 * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
 * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
 * a lowercase letter, and all following characters (except for the last character) must be a
 * dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String name;
/**
 * URL of the network resource for this firewall rule. If not specified when creating a firewall
 * rule, the default network is used:
 *
 * global/networks/default
 *
 * If you choose to specify this field, you can specify the network as a full or partial URL. For
 * example, the following are all valid URLs: -
 * https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
 * projects/myproject/global/networks/my-network - global/networks/default
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String network;
/**
 * Input only. [Input Only] Additional params passed with the request, but not persisted as part
 * of resource payload.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private FirewallParams params;
/**
 * Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
 * value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
 * Lower values indicate higher priority. For example, a rule with priority `0` has higher
 * precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
 * have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
 * avoid conflicts with the implied rules, use a priority number less than `65535`.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.Integer priority;
/**
 * [Output Only] Server-defined URL for the resource.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
 * If source ranges are specified, the firewall rule applies only to traffic that has a source IP
 * address in these ranges. These ranges must be expressed in CIDR format. One or both of
 * sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
 * that has a source IP address within sourceRanges OR a source IP from a resource with a matching
 * tag listed in the sourceTags field. The connection does not need to match both fields for the
 * rule to apply. Both IPv4 and IPv6 are supported.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceRanges;
/**
 * If source service accounts are specified, the firewall rules apply only to traffic originating
 * from an instance with a service account in this list. Source service accounts cannot be used to
 * control traffic to an instance's external IP address because service accounts are associated
 * with an instance, not an IP address. sourceRanges can be set at the same time
 * as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
 * address within the sourceRanges OR a source IP that belongs to an instance with service account
 * listed in sourceServiceAccount. The connection does not need to match both fields for the
 * firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
 * targetTags.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceServiceAccounts;
/**
 * If source tags are specified, the firewall rule applies only to traffic with source IPs that
 * match the primary network interfaces of VM instances that have the tag and are in the same VPC
 * network. Source tags cannot be used to control traffic to an instance's external IP address, it
 * only applies to traffic between instances in the same virtual network. Because tags are
 * associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
 * set. If both fields are set, the firewall applies to traffic that has a source IP address
 * within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
 * field. The connection does not need to match both fields for the firewall to apply.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> sourceTags;
/**
 * A list of service accounts indicating sets of instances located in the network that may make
 * network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
 * time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified,
 * the firewall rule applies to all instances on the specified network.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> targetServiceAccounts;
/**
 * A list of tags that controls which instances the firewall rule applies to. If targetTags are
 * specified, then the firewall rule applies only to instances in the VPC network that have one of
 * those tags. If no targetTags are specified, the firewall rule applies to all instances on the
 * specified network.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> targetTags;
/**
 * The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-
 * range tuple that describes a permitted connection.
 * @return value or {@code null} for none
 */
public java.util.List<Allowed> getAllowed() {
return allowed;
}
/**
 * The list of ALLOW rules specified by this firewall. Each rule specifies a protocol and port-
 * range tuple that describes a permitted connection.
 * @param allowed allowed or {@code null} for none
 */
public Firewall setAllowed(java.util.List<Allowed> allowed) {
this.allowed = allowed;
return this;
}
/**
 * [Output Only] Creation timestamp in RFC3339 text format.
 * @return value or {@code null} for none
 */
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
 * [Output Only] Creation timestamp in RFC3339 text format.
 * @param creationTimestamp creationTimestamp or {@code null} for none
 */
public Firewall setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
 * The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-
 * range tuple that describes a denied connection.
 * @return value or {@code null} for none
 */
public java.util.List<Denied> getDenied() {
return denied;
}
/**
 * The list of DENY rules specified by this firewall. Each rule specifies a protocol and port-
 * range tuple that describes a denied connection.
 * @param denied denied or {@code null} for none
 */
public Firewall setDenied(java.util.List<Denied> denied) {
this.denied = denied;
return this;
}
/**
 * An optional description of this resource. Provide this field when you create the resource.
 * @return value or {@code null} for none
 */
public java.lang.String getDescription() {
return description;
}
/**
 * An optional description of this resource. Provide this field when you create the resource.
 * @param description description or {@code null} for none
 */
public Firewall setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
 * If destination ranges are specified, the firewall rule applies only to traffic that has
 * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4
 * and IPv6 are supported.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getDestinationRanges() {
return destinationRanges;
}
/**
 * If destination ranges are specified, the firewall rule applies only to traffic that has
 * destination IP address in these ranges. These ranges must be expressed in CIDR format. Both IPv4
 * and IPv6 are supported.
 * @param destinationRanges destinationRanges or {@code null} for none
 */
public Firewall setDestinationRanges(java.util.List<java.lang.String> destinationRanges) {
this.destinationRanges = destinationRanges;
return this;
}
/**
 * Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
 * is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
 * @return value or {@code null} for none
 */
public java.lang.String getDirection() {
return direction;
}
/**
 * Direction of traffic to which this firewall applies, either `INGRESS` or `EGRESS`. The default
 * is `INGRESS`. For `EGRESS` traffic, you cannot specify the sourceTags fields.
 * @param direction direction or {@code null} for none
 */
public Firewall setDirection(java.lang.String direction) {
this.direction = direction;
return this;
}
/**
 * Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
 * enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
 * rule will be enabled.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getDisabled() {
return disabled;
}
/**
 * Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not
 * enforced and the network behaves as if it did not exist. If this is unspecified, the firewall
 * rule will be enabled.
 * @param disabled disabled or {@code null} for none
 */
public Firewall setDisabled(java.lang.Boolean disabled) {
this.disabled = disabled;
return this;
}
/**
 * Deprecated in favor of enable in LogConfig. This field denotes whether to enable logging for a
 * particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
 * @return value or {@code null} for none
 */
public java.lang.Boolean getEnableLogging() {
return enableLogging;
}
/**
 * Deprecated in favor of enable in LogConfig. This field denotes whether to enable logging for a
 * particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging.
 * @param enableLogging enableLogging or {@code null} for none
 */
public Firewall setEnableLogging(java.lang.Boolean enableLogging) {
this.enableLogging = enableLogging;
return this;
}
/**
 * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
 * @return value or {@code null} for none
 */
public java.math.BigInteger getId() {
return id;
}
/**
 * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
 * @param id id or {@code null} for none
 */
public Firewall setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
 * [Output Only] Type of the resource. Always compute#firewall for firewall rules.
 * @return value or {@code null} for none
 */
public java.lang.String getKind() {
return kind;
}
/**
 * [Output Only] Type of the resource. Always compute#firewall for firewall rules.
 * @param kind kind or {@code null} for none
 */
public Firewall setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
 * This field denotes the logging options for a particular firewall rule. If logging is enabled,
 * logs will be exported to Cloud Logging.
 * @return value or {@code null} for none
 */
public FirewallLogConfig getLogConfig() {
return logConfig;
}
/**
 * This field denotes the logging options for a particular firewall rule. If logging is enabled,
 * logs will be exported to Cloud Logging.
 * @param logConfig logConfig or {@code null} for none
 */
public Firewall setLogConfig(FirewallLogConfig logConfig) {
this.logConfig = logConfig;
return this;
}
/**
 * Name of the resource; provided by the client when the resource is created. The name must be
 * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
 * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
 * a lowercase letter, and all following characters (except for the last character) must be a
 * dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
 * @return value or {@code null} for none
 */
public java.lang.String getName() {
return name;
}
/**
 * Name of the resource; provided by the client when the resource is created. The name must be
 * 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
 * long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`. The first character must be
 * a lowercase letter, and all following characters (except for the last character) must be a
 * dash, lowercase letter, or digit. The last character must be a lowercase letter or digit.
 * @param name name or {@code null} for none
 */
public Firewall setName(java.lang.String name) {
this.name = name;
return this;
}
/**
 * URL of the network resource for this firewall rule. If not specified when creating a firewall
 * rule, the default network is used:
 *
 * global/networks/default
 *
 * If you choose to specify this field, you can specify the network as a full or partial URL. For
 * example, the following are all valid URLs: -
 * https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
 * projects/myproject/global/networks/my-network - global/networks/default
 * @return value or {@code null} for none
 */
public java.lang.String getNetwork() {
return network;
}
/**
 * URL of the network resource for this firewall rule. If not specified when creating a firewall
 * rule, the default network is used:
 *
 * global/networks/default
 *
 * If you choose to specify this field, you can specify the network as a full or partial URL. For
 * example, the following are all valid URLs: -
 * https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my-network -
 * projects/myproject/global/networks/my-network - global/networks/default
 * @param network network or {@code null} for none
 */
public Firewall setNetwork(java.lang.String network) {
this.network = network;
return this;
}
/**
 * Input only. [Input Only] Additional params passed with the request, but not persisted as part
 * of resource payload.
 * @return value or {@code null} for none
 */
public FirewallParams getParams() {
return params;
}
/**
 * Input only. [Input Only] Additional params passed with the request, but not persisted as part
 * of resource payload.
 * @param params params or {@code null} for none
 */
public Firewall setParams(FirewallParams params) {
this.params = params;
return this;
}
/**
 * Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
 * value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
 * Lower values indicate higher priority. For example, a rule with priority `0` has higher
 * precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
 * have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
 * avoid conflicts with the implied rules, use a priority number less than `65535`.
 * @return value or {@code null} for none
 */
public java.lang.Integer getPriority() {
return priority;
}
/**
 * Priority for this rule. This is an integer between `0` and `65535`, both inclusive. The default
 * value is `1000`. Relative priorities determine which rule takes effect if multiple rules apply.
 * Lower values indicate higher priority. For example, a rule with priority `0` has higher
 * precedence than a rule with priority `1`. DENY rules take precedence over ALLOW rules if they
 * have equal priority. Note that VPC networks have implied rules with a priority of `65535`. To
 * avoid conflicts with the implied rules, use a priority number less than `65535`.
 * @param priority priority or {@code null} for none
 */
public Firewall setPriority(java.lang.Integer priority) {
this.priority = priority;
return this;
}
/**
 * [Output Only] Server-defined URL for the resource.
 * @return value or {@code null} for none
 */
public java.lang.String getSelfLink() {
return selfLink;
}
/**
 * [Output Only] Server-defined URL for the resource.
 * @param selfLink selfLink or {@code null} for none
 */
public Firewall setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
 * If source ranges are specified, the firewall rule applies only to traffic that has a source IP
 * address in these ranges. These ranges must be expressed in CIDR format. One or both of
 * sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
 * that has a source IP address within sourceRanges OR a source IP from a resource with a matching
 * tag listed in the sourceTags field. The connection does not need to match both fields for the
 * rule to apply. Both IPv4 and IPv6 are supported.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getSourceRanges() {
return sourceRanges;
}
/**
 * If source ranges are specified, the firewall rule applies only to traffic that has a source IP
 * address in these ranges. These ranges must be expressed in CIDR format. One or both of
 * sourceRanges and sourceTags may be set. If both fields are set, the rule applies to traffic
 * that has a source IP address within sourceRanges OR a source IP from a resource with a matching
 * tag listed in the sourceTags field. The connection does not need to match both fields for the
 * rule to apply. Both IPv4 and IPv6 are supported.
 * @param sourceRanges sourceRanges or {@code null} for none
 */
public Firewall setSourceRanges(java.util.List<java.lang.String> sourceRanges) {
this.sourceRanges = sourceRanges;
return this;
}
/**
 * If source service accounts are specified, the firewall rules apply only to traffic originating
 * from an instance with a service account in this list. Source service accounts cannot be used to
 * control traffic to an instance's external IP address because service accounts are associated
 * with an instance, not an IP address. sourceRanges can be set at the same time
 * as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
 * address within the sourceRanges OR a source IP that belongs to an instance with service account
 * listed in sourceServiceAccount. The connection does not need to match both fields for the
 * firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
 * targetTags.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getSourceServiceAccounts() {
return sourceServiceAccounts;
}
/**
 * If source service accounts are specified, the firewall rules apply only to traffic originating
 * from an instance with a service account in this list. Source service accounts cannot be used to
 * control traffic to an instance's external IP address because service accounts are associated
 * with an instance, not an IP address. sourceRanges can be set at the same time
 * as sourceServiceAccounts. If both are set, the firewall applies to traffic that has a source IP
 * address within the sourceRanges OR a source IP that belongs to an instance with service account
 * listed in sourceServiceAccount. The connection does not need to match both fields for the
 * firewall to apply. sourceServiceAccounts cannot be used at the same time as sourceTags or
 * targetTags.
 * @param sourceServiceAccounts sourceServiceAccounts or {@code null} for none
 */
public Firewall setSourceServiceAccounts(java.util.List<java.lang.String> sourceServiceAccounts) {
this.sourceServiceAccounts = sourceServiceAccounts;
return this;
}
/**
 * If source tags are specified, the firewall rule applies only to traffic with source IPs that
 * match the primary network interfaces of VM instances that have the tag and are in the same VPC
 * network. Source tags cannot be used to control traffic to an instance's external IP address, it
 * only applies to traffic between instances in the same virtual network. Because tags are
 * associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
 * set. If both fields are set, the firewall applies to traffic that has a source IP address
 * within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
 * field. The connection does not need to match both fields for the firewall to apply.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getSourceTags() {
return sourceTags;
}
/**
 * If source tags are specified, the firewall rule applies only to traffic with source IPs that
 * match the primary network interfaces of VM instances that have the tag and are in the same VPC
 * network. Source tags cannot be used to control traffic to an instance's external IP address, it
 * only applies to traffic between instances in the same virtual network. Because tags are
 * associated with instances, not IP addresses. One or both of sourceRanges and sourceTags may be
 * set. If both fields are set, the firewall applies to traffic that has a source IP address
 * within sourceRanges OR a source IP from a resource with a matching tag listed in the sourceTags
 * field. The connection does not need to match both fields for the firewall to apply.
 * @param sourceTags sourceTags or {@code null} for none
 */
public Firewall setSourceTags(java.util.List<java.lang.String> sourceTags) {
this.sourceTags = sourceTags;
return this;
}
/**
 * A list of service accounts indicating sets of instances located in the network that may make
 * network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
 * time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified,
 * the firewall rule applies to all instances on the specified network.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getTargetServiceAccounts() {
return targetServiceAccounts;
}
/**
 * A list of service accounts indicating sets of instances located in the network that may make
 * network connections as specified in allowed[]. targetServiceAccounts cannot be used at the same
 * time as targetTags or sourceTags. If neither targetServiceAccounts nor targetTags are specified,
 * the firewall rule applies to all instances on the specified network.
 * @param targetServiceAccounts targetServiceAccounts or {@code null} for none
 */
public Firewall setTargetServiceAccounts(java.util.List<java.lang.String> targetServiceAccounts) {
this.targetServiceAccounts = targetServiceAccounts;
return this;
}
/**
 * A list of tags that controls which instances the firewall rule applies to. If targetTags are
 * specified, then the firewall rule applies only to instances in the VPC network that have one of
 * those tags. If no targetTags are specified, the firewall rule applies to all instances on the
 * specified network.
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getTargetTags() {
return targetTags;
}
/**
 * A list of tags that controls which instances the firewall rule applies to. If targetTags are
 * specified, then the firewall rule applies only to instances in the VPC network that have one of
 * those tags. If no targetTags are specified, the firewall rule applies to all instances on the
 * specified network.
 * @param targetTags targetTags or {@code null} for none
 */
public Firewall setTargetTags(java.util.List<java.lang.String> targetTags) {
this.targetTags = targetTags;
return this;
}
@Override
public Firewall set(String fieldName, Object value) {
return (Firewall) super.set(fieldName, value);
}
@Override
public Firewall clone() {
return (Firewall) super.clone();
}
/**
 * Model definition for FirewallAllowed.
 */
public static final class Allowed extends com.google.api.client.json.GenericJson {
/**
 * The IP protocol to which this rule applies. The protocol type is required when creating a
 * firewall rule. This value can either be one of the following well known protocol strings (tcp,
 * udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key("IPProtocol")
private java.lang.String iPProtocol;
/**
 * An optional list of ports to which this rule applies. This field is only applicable for the UDP
 * or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
 * applies to connections through any port.
 *
 * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> ports;
/**
 * The IP protocol to which this rule applies. The protocol type is required when creating a
 * firewall rule. This value can either be one of the following well known protocol strings (tcp,
 * udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
 * @return value or {@code null} for none
 */
public java.lang.String getIPProtocol() {
return iPProtocol;
}
/**
 * The IP protocol to which this rule applies. The protocol type is required when creating a
 * firewall rule. This value can either be one of the following well known protocol strings (tcp,
 * udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
 * @param iPProtocol iPProtocol or {@code null} for none
 */
public Allowed setIPProtocol(java.lang.String iPProtocol) {
this.iPProtocol = iPProtocol;
return this;
}
/**
 * An optional list of ports to which this rule applies. This field is only applicable for the UDP
 * or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
 * applies to connections through any port.
 *
 * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getPorts() {
return ports;
}
/**
 * An optional list of ports to which this rule applies. This field is only applicable for the UDP
 * or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
 * applies to connections through any port.
 *
 * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
 * @param ports ports or {@code null} for none
 */
public Allowed setPorts(java.util.List<java.lang.String> ports) {
this.ports = ports;
return this;
}
@Override
public Allowed set(String fieldName, Object value) {
return (Allowed) super.set(fieldName, value);
}
@Override
public Allowed clone() {
return (Allowed) super.clone();
}
}
/**
 * Model definition for FirewallDenied.
 */
public static final class Denied extends com.google.api.client.json.GenericJson {
/**
 * The IP protocol to which this rule applies. The protocol type is required when creating a
 * firewall rule. This value can either be one of the following well known protocol strings (tcp,
 * udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key("IPProtocol")
private java.lang.String iPProtocol;
/**
 * An optional list of ports to which this rule applies. This field is only applicable for the UDP
 * or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
 * applies to connections through any port.
 *
 * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
 * The value may be {@code null}.
 */
@com.google.api.client.util.Key
private java.util.List<java.lang.String> ports;
/**
 * The IP protocol to which this rule applies. The protocol type is required when creating a
 * firewall rule. This value can either be one of the following well known protocol strings (tcp,
 * udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
 * @return value or {@code null} for none
 */
public java.lang.String getIPProtocol() {
return iPProtocol;
}
/**
 * The IP protocol to which this rule applies. The protocol type is required when creating a
 * firewall rule. This value can either be one of the following well known protocol strings (tcp,
 * udp, icmp, esp, ah, ipip, sctp) or the IP protocol number.
 * @param iPProtocol iPProtocol or {@code null} for none
 */
public Denied setIPProtocol(java.lang.String iPProtocol) {
this.iPProtocol = iPProtocol;
return this;
}
/**
 * An optional list of ports to which this rule applies. This field is only applicable for the UDP
 * or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
 * applies to connections through any port.
 *
 * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
 * @return value or {@code null} for none
 */
public java.util.List<java.lang.String> getPorts() {
return ports;
}
/**
 * An optional list of ports to which this rule applies. This field is only applicable for the UDP
 * or TCP protocol. Each entry must be either an integer or a range. If not specified, this rule
 * applies to connections through any port.
 *
 * Example inputs include: ["22"], ["80","443"], and ["12345-12349"].
 * @param ports ports or {@code null} for none
 */
public Denied setPorts(java.util.List<java.lang.String> ports) {
this.ports = ports;
return this;
}
@Override
public Denied set(String fieldName, Object value) {
return (Denied) super.set(fieldName, value);
}
@Override
public Denied clone() {
return (Denied) super.clone();
}
}
}
|
googleapis/google-cloud-java | 36,253 | java-securitycentermanagement/proto-google-cloud-securitycentermanagement-v1/src/main/java/com/google/cloud/securitycentermanagement/v1/ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycentermanagement/v1/security_center_management.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycentermanagement.v1;
/**
*
*
* <pre>
* Request message for
* [SecurityCenterManagement.ListEffectiveSecurityHealthAnalyticsCustomModules][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListEffectiveSecurityHealthAnalyticsCustomModules].
* </pre>
*
* Protobuf type {@code
* google.cloud.securitycentermanagement.v1.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest}
*/
public final class ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.securitycentermanagement.v1.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest)
    ListEffectiveSecurityHealthAnalyticsCustomModulesRequestOrBuilder {
  // NOTE(review): protoc-generated message (file header says "DO NOT EDIT"). Hand edits
  // will be lost on regeneration — change security_center_management.proto instead.
  private static final long serialVersionUID = 0L;

  // Use ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.newBuilder() to construct.
  private ListEffectiveSecurityHealthAnalyticsCustomModulesRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ListEffectiveSecurityHealthAnalyticsCustomModulesRequest() {
    parent_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListEffectiveSecurityHealthAnalyticsCustomModulesRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
        .internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveSecurityHealthAnalyticsCustomModulesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
        .internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveSecurityHealthAnalyticsCustomModulesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycentermanagement.v1
                .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.class,
            com.google.cloud.securitycentermanagement.v1
                .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.Builder.class);
  }

  public static final int PARENT_FIELD_NUMBER = 1;

  // Lazily decoded: holds a ByteString until first String access, then caches the String.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. Name of parent to list effective custom modules, in one of the
   * following formats:
   *
   * * `organizations/{organization}/locations/{location}`
   * * `folders/{folder}/locations/{location}`
   * * `projects/{project}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Name of parent to list effective custom modules, in one of the
   * following formats:
   *
   * * `organizations/{organization}/locations/{location}`
   * * `folders/{folder}/locations/{location}`
   * * `projects/{project}/locations/{location}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 2;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. The maximum number of results to return in a single response.
   * Default is 10, minimum is 1, maximum is 1000.
   * </pre>
   *
   * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 3;

  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. A pagination token returned from a previous request. Provide this
   * token to retrieve the next page of results.
   *
   * When paginating, the rest of the request must match the request that
   * generated the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A pagination token returned from a previous request. Provide this
   * token to retrieve the next page of results.
   *
   * When paginating, the rest of the request must match the request that
   * generated the page token.
   * </pre>
   *
   * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.securitycentermanagement.v1
            .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycentermanagement.v1
            .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
        other =
            (com.google.cloud.securitycentermanagement.v1
                    .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest)
                obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.securitycentermanagement.v1
              .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Request message for
   * [SecurityCenterManagement.ListEffectiveSecurityHealthAnalyticsCustomModules][google.cloud.securitycentermanagement.v1.SecurityCenterManagement.ListEffectiveSecurityHealthAnalyticsCustomModules].
   * </pre>
   *
   * Protobuf type {@code
   * google.cloud.securitycentermanagement.v1.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.securitycentermanagement.v1.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest)
      com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
          .internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveSecurityHealthAnalyticsCustomModulesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
          .internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveSecurityHealthAnalyticsCustomModulesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.securitycentermanagement.v1
                  .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.class,
              com.google.cloud.securitycentermanagement.v1
                  .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.securitycentermanagement.v1.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.securitycentermanagement.v1.SecurityCenterManagementProto
          .internal_static_google_cloud_securitycentermanagement_v1_ListEffectiveSecurityHealthAnalyticsCustomModulesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.securitycentermanagement.v1
            .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.securitycentermanagement.v1
            .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
        build() {
      com.google.cloud.securitycentermanagement.v1
              .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
          result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.securitycentermanagement.v1
            .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
        buildPartial() {
      com.google.cloud.securitycentermanagement.v1
              .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
          result =
              new com.google.cloud.securitycentermanagement.v1
                  .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set in bitField0_.
    private void buildPartial0(
        com.google.cloud.securitycentermanagement.v1
                .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof
          com.google.cloud.securitycentermanagement.v1
              .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest) {
        return mergeFrom(
            (com.google.cloud.securitycentermanagement.v1
                    .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest)
                other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.securitycentermanagement.v1
                .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
            other) {
      if (other
          == com.google.cloud.securitycentermanagement.v1
              .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. Name of parent to list effective custom modules, in one of the
     * following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Name of parent to list effective custom modules, in one of the
     * following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Name of parent to list effective custom modules, in one of the
     * following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Name of parent to list effective custom modules, in one of the
     * following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Name of parent to list effective custom modules, in one of the
     * following formats:
     *
     * * `organizations/{organization}/locations/{location}`
     * * `folders/{folder}/locations/{location}`
     * * `projects/{project}/locations/{location}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in a single response.
     * Default is 10, minimum is 1, maximum is 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in a single response.
     * Default is 10, minimum is 1, maximum is 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of results to return in a single response.
     * Default is 10, minimum is 1, maximum is 1000.
     * </pre>
     *
     * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000002);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. A pagination token returned from a previous request. Provide this
     * token to retrieve the next page of results.
     *
     * When paginating, the rest of the request must match the request that
     * generated the page token.
     * </pre>
     *
     * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.securitycentermanagement.v1.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.securitycentermanagement.v1.ListEffectiveSecurityHealthAnalyticsCustomModulesRequest)
  private static final com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.securitycentermanagement.v1
            .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest();
  }

  public static com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<
          ListEffectiveSecurityHealthAnalyticsCustomModulesRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<
              ListEffectiveSecurityHealthAnalyticsCustomModulesRequest>() {
            @java.lang.Override
            public ListEffectiveSecurityHealthAnalyticsCustomModulesRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };

  public static com.google.protobuf.Parser<ListEffectiveSecurityHealthAnalyticsCustomModulesRequest>
      parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListEffectiveSecurityHealthAnalyticsCustomModulesRequest>
      getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.securitycentermanagement.v1
          .ListEffectiveSecurityHealthAnalyticsCustomModulesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
google-ar/arcore-android-sdk | 36,481 | samples/hello_ar_java/app/src/main/java/com/google/ar/core/examples/java/helloar/HelloArActivity.java | /*
* Copyright 2017 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ar.core.examples.java.helloar;
import android.content.DialogInterface;
import android.content.res.Resources;
import android.media.Image;
import android.opengl.GLES30;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Bundle;
import android.util.Log;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageButton;
import android.widget.PopupMenu;
import android.widget.Toast;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import com.google.ar.core.Anchor;
import com.google.ar.core.ArCoreApk;
import com.google.ar.core.ArCoreApk.Availability;
import com.google.ar.core.Camera;
import com.google.ar.core.Config;
import com.google.ar.core.Config.InstantPlacementMode;
import com.google.ar.core.DepthPoint;
import com.google.ar.core.Frame;
import com.google.ar.core.HitResult;
import com.google.ar.core.InstantPlacementPoint;
import com.google.ar.core.LightEstimate;
import com.google.ar.core.Plane;
import com.google.ar.core.Point;
import com.google.ar.core.Point.OrientationMode;
import com.google.ar.core.PointCloud;
import com.google.ar.core.Session;
import com.google.ar.core.Trackable;
import com.google.ar.core.TrackingFailureReason;
import com.google.ar.core.TrackingState;
import com.google.ar.core.examples.java.common.helpers.CameraPermissionHelper;
import com.google.ar.core.examples.java.common.helpers.DepthSettings;
import com.google.ar.core.examples.java.common.helpers.DisplayRotationHelper;
import com.google.ar.core.examples.java.common.helpers.FullScreenHelper;
import com.google.ar.core.examples.java.common.helpers.InstantPlacementSettings;
import com.google.ar.core.examples.java.common.helpers.SnackbarHelper;
import com.google.ar.core.examples.java.common.helpers.TapHelper;
import com.google.ar.core.examples.java.common.helpers.TrackingStateHelper;
import com.google.ar.core.examples.java.common.samplerender.Framebuffer;
import com.google.ar.core.examples.java.common.samplerender.GLError;
import com.google.ar.core.examples.java.common.samplerender.Mesh;
import com.google.ar.core.examples.java.common.samplerender.SampleRender;
import com.google.ar.core.examples.java.common.samplerender.Shader;
import com.google.ar.core.examples.java.common.samplerender.Texture;
import com.google.ar.core.examples.java.common.samplerender.VertexBuffer;
import com.google.ar.core.examples.java.common.samplerender.arcore.BackgroundRenderer;
import com.google.ar.core.examples.java.common.samplerender.arcore.PlaneRenderer;
import com.google.ar.core.examples.java.common.samplerender.arcore.SpecularCubemapFilter;
import com.google.ar.core.exceptions.CameraNotAvailableException;
import com.google.ar.core.exceptions.NotYetAvailableException;
import com.google.ar.core.exceptions.UnavailableApkTooOldException;
import com.google.ar.core.exceptions.UnavailableArcoreNotInstalledException;
import com.google.ar.core.exceptions.UnavailableDeviceNotCompatibleException;
import com.google.ar.core.exceptions.UnavailableSdkTooOldException;
import com.google.ar.core.exceptions.UnavailableUserDeclinedInstallationException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
/**
* This is a simple example that shows how to create an augmented reality (AR) application using the
* ARCore API. The application will display any detected planes and will allow the user to tap on a
* plane to place a 3D model.
*/
public class HelloArActivity extends AppCompatActivity implements SampleRender.Renderer {
private static final String TAG = HelloArActivity.class.getSimpleName();
private static final String SEARCHING_PLANE_MESSAGE = "Searching for surfaces...";
private static final String WAITING_FOR_TAP_MESSAGE = "Tap on a surface to place an object.";
// See the definition of updateSphericalHarmonicsCoefficients for an explanation of these
// constants.
private static final float[] sphericalHarmonicFactors = {
0.282095f,
-0.325735f,
0.325735f,
-0.325735f,
0.273137f,
-0.273137f,
0.078848f,
-0.273137f,
0.136569f,
};
private static final float Z_NEAR = 0.1f;
private static final float Z_FAR = 100f;
private static final int CUBEMAP_RESOLUTION = 16;
private static final int CUBEMAP_NUMBER_OF_IMPORTANCE_SAMPLES = 32;
// Rendering. The Renderers are created here, and initialized when the GL surface is created.
private GLSurfaceView surfaceView;
private boolean installRequested;
private Session session;
private final SnackbarHelper messageSnackbarHelper = new SnackbarHelper();
private DisplayRotationHelper displayRotationHelper;
private final TrackingStateHelper trackingStateHelper = new TrackingStateHelper(this);
private TapHelper tapHelper;
private SampleRender render;
private PlaneRenderer planeRenderer;
private BackgroundRenderer backgroundRenderer;
private Framebuffer virtualSceneFramebuffer;
private boolean hasSetTextureNames = false;
private final DepthSettings depthSettings = new DepthSettings();
private boolean[] depthSettingsMenuDialogCheckboxes = new boolean[2];
private final InstantPlacementSettings instantPlacementSettings = new InstantPlacementSettings();
private boolean[] instantPlacementSettingsMenuDialogCheckboxes = new boolean[1];
// Assumed distance from the device camera to the surface on which user will try to place objects.
// This value affects the apparent scale of objects while the tracking method of the
// Instant Placement point is SCREENSPACE_WITH_APPROXIMATE_DISTANCE.
// Values in the [0.2, 2.0] meter range are a good choice for most AR experiences. Use lower
// values for AR experiences where users are expected to place objects on surfaces close to the
// camera. Use larger values for experiences where the user will likely be standing and trying to
// place an object on the ground or floor in front of them.
private static final float APPROXIMATE_DISTANCE_METERS = 2.0f;
// Point Cloud
private VertexBuffer pointCloudVertexBuffer;
private Mesh pointCloudMesh;
private Shader pointCloudShader;
// Keep track of the last point cloud rendered to avoid updating the VBO if point cloud
// was not changed. Do this using the timestamp since we can't compare PointCloud objects.
private long lastPointCloudTimestamp = 0;
// Virtual object (ARCore pawn)
private Mesh virtualObjectMesh;
private Shader virtualObjectShader;
private Texture virtualObjectAlbedoTexture;
private Texture virtualObjectAlbedoInstantPlacementTexture;
private final List<WrappedAnchor> wrappedAnchors = new ArrayList<>();
// Environmental HDR
private Texture dfgTexture;
private SpecularCubemapFilter cubemapFilter;
// Temporary matrix allocated here to reduce number of allocations for each frame.
private final float[] modelMatrix = new float[16];
private final float[] viewMatrix = new float[16];
private final float[] projectionMatrix = new float[16];
private final float[] modelViewMatrix = new float[16]; // view x model
private final float[] modelViewProjectionMatrix = new float[16]; // projection x view x model
private final float[] sphericalHarmonicsCoefficients = new float[9 * 3];
private final float[] viewInverseMatrix = new float[16];
private final float[] worldLightDirection = {0.0f, 0.0f, 0.0f, 0.0f};
private final float[] viewLightDirection = new float[4]; // view x world light direction
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
surfaceView = findViewById(R.id.surfaceview);
displayRotationHelper = new DisplayRotationHelper(/* context= */ this);
// Set up touch listener.
tapHelper = new TapHelper(/* context= */ this);
surfaceView.setOnTouchListener(tapHelper);
// Set up renderer.
render = new SampleRender(surfaceView, this, getAssets());
installRequested = false;
depthSettings.onCreate(this);
instantPlacementSettings.onCreate(this);
ImageButton settingsButton = findViewById(R.id.settings_button);
settingsButton.setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(View v) {
PopupMenu popup = new PopupMenu(HelloArActivity.this, v);
popup.setOnMenuItemClickListener(HelloArActivity.this::settingsMenuClick);
popup.inflate(R.menu.settings_menu);
popup.show();
}
});
}
/** Menu button to launch feature specific settings. */
protected boolean settingsMenuClick(MenuItem item) {
if (item.getItemId() == R.id.depth_settings) {
launchDepthSettingsMenuDialog();
return true;
} else if (item.getItemId() == R.id.instant_placement_settings) {
launchInstantPlacementSettingsMenuDialog();
return true;
}
return false;
}
@Override
protected void onDestroy() {
if (session != null) {
// Explicitly close ARCore Session to release native resources.
// Review the API reference for important considerations before calling close() in apps with
// more complicated lifecycle requirements:
// https://developers.google.com/ar/reference/java/arcore/reference/com/google/ar/core/Session#close()
session.close();
session = null;
}
super.onDestroy();
}
@Override
protected void onResume() {
super.onResume();
if (session == null) {
Exception exception = null;
String message = null;
try {
// Always check the latest availability.
Availability availability = ArCoreApk.getInstance().checkAvailability(this);
// In all other cases, try to install ARCore and handle installation failures.
if (availability != Availability.SUPPORTED_INSTALLED) {
switch (ArCoreApk.getInstance().requestInstall(this, !installRequested)) {
case INSTALL_REQUESTED:
installRequested = true;
return;
case INSTALLED:
break;
}
}
// ARCore requires camera permissions to operate. If we did not yet obtain runtime
// permission on Android M and above, now is a good time to ask the user for it.
if (!CameraPermissionHelper.hasCameraPermission(this)) {
CameraPermissionHelper.requestCameraPermission(this);
return;
}
// Create the session.
session = new Session(/* context= */ this);
} catch (UnavailableArcoreNotInstalledException
| UnavailableUserDeclinedInstallationException e) {
message = "Please install ARCore";
exception = e;
} catch (UnavailableApkTooOldException e) {
message = "Please update ARCore";
exception = e;
} catch (UnavailableSdkTooOldException e) {
message = "Please update this app";
exception = e;
} catch (UnavailableDeviceNotCompatibleException e) {
message = "This device does not support AR";
exception = e;
} catch (Exception e) {
message = "Failed to create AR session";
exception = e;
}
if (message != null) {
messageSnackbarHelper.showError(this, message);
Log.e(TAG, "Exception creating session", exception);
return;
}
}
// Note that order matters - see the note in onPause(), the reverse applies here.
try {
configureSession();
// To record a live camera session for later playback, call
// `session.startRecording(recordingConfig)` at anytime. To playback a previously recorded AR
// session instead of using the live camera feed, call
// `session.setPlaybackDatasetUri(Uri)` before calling `session.resume()`. To
// learn more about recording and playback, see:
// https://developers.google.com/ar/develop/java/recording-and-playback
session.resume();
} catch (CameraNotAvailableException e) {
messageSnackbarHelper.showError(this, "Camera not available. Try restarting the app.");
session = null;
return;
}
surfaceView.onResume();
displayRotationHelper.onResume();
}
@Override
public void onPause() {
super.onPause();
if (session != null) {
// Note that the order matters - GLSurfaceView is paused first so that it does not try
// to query the session. If Session is paused before GLSurfaceView, GLSurfaceView may
// still call session.update() and get a SessionPausedException.
displayRotationHelper.onPause();
surfaceView.onPause();
session.pause();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] results) {
super.onRequestPermissionsResult(requestCode, permissions, results);
if (!CameraPermissionHelper.hasCameraPermission(this)) {
// Use toast instead of snackbar here since the activity will exit.
Toast.makeText(this, "Camera permission is needed to run this application", Toast.LENGTH_LONG)
.show();
if (!CameraPermissionHelper.shouldShowRequestPermissionRationale(this)) {
// Permission denied with checking "Do not ask again".
CameraPermissionHelper.launchPermissionSettings(this);
}
finish();
}
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
FullScreenHelper.setFullScreenOnWindowFocusChanged(this, hasFocus);
}
@Override
public void onSurfaceCreated(SampleRender render) {
// Prepare the rendering objects. This involves reading shaders and 3D model files, so may throw
// an IOException.
try {
planeRenderer = new PlaneRenderer(render);
backgroundRenderer = new BackgroundRenderer(render);
virtualSceneFramebuffer = new Framebuffer(render, /* width= */ 1, /* height= */ 1);
cubemapFilter =
new SpecularCubemapFilter(
render, CUBEMAP_RESOLUTION, CUBEMAP_NUMBER_OF_IMPORTANCE_SAMPLES);
// Load DFG lookup table for environmental lighting
dfgTexture =
new Texture(
render,
Texture.Target.TEXTURE_2D,
Texture.WrapMode.CLAMP_TO_EDGE,
/* useMipmaps= */ false);
// The dfg.raw file is a raw half-float texture with two channels.
final int dfgResolution = 64;
final int dfgChannels = 2;
final int halfFloatSize = 2;
ByteBuffer buffer =
ByteBuffer.allocateDirect(dfgResolution * dfgResolution * dfgChannels * halfFloatSize);
try (InputStream is = getAssets().open("models/dfg.raw")) {
is.read(buffer.array());
}
// SampleRender abstraction leaks here.
GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, dfgTexture.getTextureId());
GLError.maybeThrowGLException("Failed to bind DFG texture", "glBindTexture");
GLES30.glTexImage2D(
GLES30.GL_TEXTURE_2D,
/* level= */ 0,
GLES30.GL_RG16F,
/* width= */ dfgResolution,
/* height= */ dfgResolution,
/* border= */ 0,
GLES30.GL_RG,
GLES30.GL_HALF_FLOAT,
buffer);
GLError.maybeThrowGLException("Failed to populate DFG texture", "glTexImage2D");
// Point cloud
pointCloudShader =
Shader.createFromAssets(
render,
"shaders/point_cloud.vert",
"shaders/point_cloud.frag",
/* defines= */ null)
.setVec4(
"u_Color", new float[] {31.0f / 255.0f, 188.0f / 255.0f, 210.0f / 255.0f, 1.0f})
.setFloat("u_PointSize", 5.0f);
// four entries per vertex: X, Y, Z, confidence
pointCloudVertexBuffer =
new VertexBuffer(render, /* numberOfEntriesPerVertex= */ 4, /* entries= */ null);
final VertexBuffer[] pointCloudVertexBuffers = {pointCloudVertexBuffer};
pointCloudMesh =
new Mesh(
render, Mesh.PrimitiveMode.POINTS, /* indexBuffer= */ null, pointCloudVertexBuffers);
// Virtual object to render (ARCore pawn)
virtualObjectAlbedoTexture =
Texture.createFromAsset(
render,
"models/pawn_albedo.png",
Texture.WrapMode.CLAMP_TO_EDGE,
Texture.ColorFormat.SRGB);
virtualObjectAlbedoInstantPlacementTexture =
Texture.createFromAsset(
render,
"models/pawn_albedo_instant_placement.png",
Texture.WrapMode.CLAMP_TO_EDGE,
Texture.ColorFormat.SRGB);
Texture virtualObjectPbrTexture =
Texture.createFromAsset(
render,
"models/pawn_roughness_metallic_ao.png",
Texture.WrapMode.CLAMP_TO_EDGE,
Texture.ColorFormat.LINEAR);
virtualObjectMesh = Mesh.createFromAsset(render, "models/pawn.obj");
virtualObjectShader =
Shader.createFromAssets(
render,
"shaders/environmental_hdr.vert",
"shaders/environmental_hdr.frag",
/* defines= */ new HashMap<String, String>() {
{
put(
"NUMBER_OF_MIPMAP_LEVELS",
Integer.toString(cubemapFilter.getNumberOfMipmapLevels()));
}
})
.setTexture("u_AlbedoTexture", virtualObjectAlbedoTexture)
.setTexture("u_RoughnessMetallicAmbientOcclusionTexture", virtualObjectPbrTexture)
.setTexture("u_Cubemap", cubemapFilter.getFilteredCubemapTexture())
.setTexture("u_DfgTexture", dfgTexture);
} catch (IOException e) {
Log.e(TAG, "Failed to read a required asset file", e);
messageSnackbarHelper.showError(this, "Failed to read a required asset file: " + e);
}
}
@Override
public void onSurfaceChanged(SampleRender render, int width, int height) {
displayRotationHelper.onSurfaceChanged(width, height);
virtualSceneFramebuffer.resize(width, height);
}
@Override
public void onDrawFrame(SampleRender render) {
if (session == null) {
return;
}
// Texture names should only be set once on a GL thread unless they change. This is done during
// onDrawFrame rather than onSurfaceCreated since the session is not guaranteed to have been
// initialized during the execution of onSurfaceCreated.
if (!hasSetTextureNames) {
session.setCameraTextureNames(
new int[] {backgroundRenderer.getCameraColorTexture().getTextureId()});
hasSetTextureNames = true;
}
// -- Update per-frame state
// Notify ARCore session that the view size changed so that the perspective matrix and
// the video background can be properly adjusted.
displayRotationHelper.updateSessionIfNeeded(session);
// Obtain the current frame from the AR Session. When the configuration is set to
// UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
// camera framerate.
Frame frame;
try {
frame = session.update();
} catch (CameraNotAvailableException e) {
Log.e(TAG, "Camera not available during onDrawFrame", e);
messageSnackbarHelper.showError(this, "Camera not available. Try restarting the app.");
return;
}
Camera camera = frame.getCamera();
// Update BackgroundRenderer state to match the depth settings.
try {
backgroundRenderer.setUseDepthVisualization(
render, depthSettings.depthColorVisualizationEnabled());
backgroundRenderer.setUseOcclusion(render, depthSettings.useDepthForOcclusion());
} catch (IOException e) {
Log.e(TAG, "Failed to read a required asset file", e);
messageSnackbarHelper.showError(this, "Failed to read a required asset file: " + e);
return;
}
// BackgroundRenderer.updateDisplayGeometry must be called every frame to update the coordinates
// used to draw the background camera image.
backgroundRenderer.updateDisplayGeometry(frame);
if (camera.getTrackingState() == TrackingState.TRACKING
&& (depthSettings.useDepthForOcclusion()
|| depthSettings.depthColorVisualizationEnabled())) {
try (Image depthImage = frame.acquireDepthImage16Bits()) {
backgroundRenderer.updateCameraDepthTexture(depthImage);
} catch (NotYetAvailableException e) {
// This normally means that depth data is not available yet. This is normal so we will not
// spam the logcat with this.
}
}
// Handle one tap per frame.
handleTap(frame, camera);
// Keep the screen unlocked while tracking, but allow it to lock when tracking stops.
trackingStateHelper.updateKeepScreenOnFlag(camera.getTrackingState());
// Show a message based on whether tracking has failed, if planes are detected, and if the user
// has placed any objects.
String message = null;
if (camera.getTrackingState() == TrackingState.PAUSED) {
if (camera.getTrackingFailureReason() == TrackingFailureReason.NONE) {
message = SEARCHING_PLANE_MESSAGE;
} else {
message = TrackingStateHelper.getTrackingFailureReasonString(camera);
}
} else if (hasTrackingPlane()) {
if (wrappedAnchors.isEmpty()) {
message = WAITING_FOR_TAP_MESSAGE;
}
} else {
message = SEARCHING_PLANE_MESSAGE;
}
if (message == null) {
messageSnackbarHelper.hide(this);
} else {
messageSnackbarHelper.showMessage(this, message);
}
// -- Draw background
if (frame.getTimestamp() != 0) {
// Suppress rendering if the camera did not produce the first frame yet. This is to avoid
// drawing possible leftover data from previous sessions if the texture is reused.
backgroundRenderer.drawBackground(render);
}
// If not tracking, don't draw 3D objects.
if (camera.getTrackingState() == TrackingState.PAUSED) {
return;
}
// -- Draw non-occluded virtual objects (planes, point cloud)
// Get projection matrix.
camera.getProjectionMatrix(projectionMatrix, 0, Z_NEAR, Z_FAR);
// Get camera matrix and draw.
camera.getViewMatrix(viewMatrix, 0);
// Visualize tracked points.
// Use try-with-resources to automatically release the point cloud.
try (PointCloud pointCloud = frame.acquirePointCloud()) {
if (pointCloud.getTimestamp() > lastPointCloudTimestamp) {
pointCloudVertexBuffer.set(pointCloud.getPoints());
lastPointCloudTimestamp = pointCloud.getTimestamp();
}
Matrix.multiplyMM(modelViewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
pointCloudShader.setMat4("u_ModelViewProjection", modelViewProjectionMatrix);
render.draw(pointCloudMesh, pointCloudShader);
}
// Visualize planes.
planeRenderer.drawPlanes(
render,
session.getAllTrackables(Plane.class),
camera.getDisplayOrientedPose(),
projectionMatrix);
// -- Draw occluded virtual objects
// Update lighting parameters in the shader
updateLightEstimation(frame.getLightEstimate(), viewMatrix);
// Visualize anchors created by touch.
render.clear(virtualSceneFramebuffer, 0f, 0f, 0f, 0f);
for (WrappedAnchor wrappedAnchor : wrappedAnchors) {
Anchor anchor = wrappedAnchor.getAnchor();
Trackable trackable = wrappedAnchor.getTrackable();
if (anchor.getTrackingState() != TrackingState.TRACKING) {
continue;
}
// Get the current pose of an Anchor in world space. The Anchor pose is updated
// during calls to session.update() as ARCore refines its estimate of the world.
anchor.getPose().toMatrix(modelMatrix, 0);
// Calculate model/view/projection matrices
Matrix.multiplyMM(modelViewMatrix, 0, viewMatrix, 0, modelMatrix, 0);
Matrix.multiplyMM(modelViewProjectionMatrix, 0, projectionMatrix, 0, modelViewMatrix, 0);
// Update shader properties and draw
virtualObjectShader.setMat4("u_ModelView", modelViewMatrix);
virtualObjectShader.setMat4("u_ModelViewProjection", modelViewProjectionMatrix);
if (trackable instanceof InstantPlacementPoint
&& ((InstantPlacementPoint) trackable).getTrackingMethod()
== InstantPlacementPoint.TrackingMethod.SCREENSPACE_WITH_APPROXIMATE_DISTANCE) {
virtualObjectShader.setTexture(
"u_AlbedoTexture", virtualObjectAlbedoInstantPlacementTexture);
} else {
virtualObjectShader.setTexture("u_AlbedoTexture", virtualObjectAlbedoTexture);
}
render.draw(virtualObjectMesh, virtualObjectShader, virtualSceneFramebuffer);
}
// Compose the virtual scene with the background.
backgroundRenderer.drawVirtualScene(render, virtualSceneFramebuffer, Z_NEAR, Z_FAR);
}
// Handle only one tap per frame, as taps are usually low frequency compared to frame rate.
private void handleTap(Frame frame, Camera camera) {
MotionEvent tap = tapHelper.poll();
if (tap != null && camera.getTrackingState() == TrackingState.TRACKING) {
List<HitResult> hitResultList;
if (instantPlacementSettings.isInstantPlacementEnabled()) {
hitResultList =
frame.hitTestInstantPlacement(tap.getX(), tap.getY(), APPROXIMATE_DISTANCE_METERS);
} else {
hitResultList = frame.hitTest(tap);
}
for (HitResult hit : hitResultList) {
// If any plane, Oriented Point, or Instant Placement Point was hit, create an anchor.
Trackable trackable = hit.getTrackable();
// If a plane was hit, check that it was hit inside the plane polygon.
// DepthPoints are only returned if Config.DepthMode is set to AUTOMATIC.
if ((trackable instanceof Plane
&& ((Plane) trackable).isPoseInPolygon(hit.getHitPose())
&& (PlaneRenderer.calculateDistanceToPlane(hit.getHitPose(), camera.getPose()) > 0))
|| (trackable instanceof Point
&& ((Point) trackable).getOrientationMode()
== OrientationMode.ESTIMATED_SURFACE_NORMAL)
|| (trackable instanceof InstantPlacementPoint)
|| (trackable instanceof DepthPoint)) {
// Cap the number of objects created. This avoids overloading both the
// rendering system and ARCore.
if (wrappedAnchors.size() >= 20) {
wrappedAnchors.get(0).getAnchor().detach();
wrappedAnchors.remove(0);
}
// Adding an Anchor tells ARCore that it should track this position in
// space. This anchor is created on the Plane to place the 3D model
// in the correct position relative both to the world and to the plane.
wrappedAnchors.add(new WrappedAnchor(hit.createAnchor(), trackable));
// For devices that support the Depth API, shows a dialog to suggest enabling
// depth-based occlusion. This dialog needs to be spawned on the UI thread.
this.runOnUiThread(this::showOcclusionDialogIfNeeded);
// Hits are sorted by depth. Consider only closest hit on a plane, Oriented Point, or
// Instant Placement Point.
break;
}
}
}
}
/**
* Shows a pop-up dialog on the first call, determining whether the user wants to enable
* depth-based occlusion. The result of this dialog can be retrieved with useDepthForOcclusion().
*/
private void showOcclusionDialogIfNeeded() {
boolean isDepthSupported = session.isDepthModeSupported(Config.DepthMode.AUTOMATIC);
if (!depthSettings.shouldShowDepthEnableDialog() || !isDepthSupported) {
return; // Don't need to show dialog.
}
// Asks the user whether they want to use depth-based occlusion.
new AlertDialog.Builder(this)
.setTitle(R.string.options_title_with_depth)
.setMessage(R.string.depth_use_explanation)
.setPositiveButton(
R.string.button_text_enable_depth,
(DialogInterface dialog, int which) -> {
depthSettings.setUseDepthForOcclusion(true);
})
.setNegativeButton(
R.string.button_text_disable_depth,
(DialogInterface dialog, int which) -> {
depthSettings.setUseDepthForOcclusion(false);
})
.show();
}
private void launchInstantPlacementSettingsMenuDialog() {
resetSettingsMenuDialogCheckboxes();
Resources resources = getResources();
new AlertDialog.Builder(this)
.setTitle(R.string.options_title_instant_placement)
.setMultiChoiceItems(
resources.getStringArray(R.array.instant_placement_options_array),
instantPlacementSettingsMenuDialogCheckboxes,
(DialogInterface dialog, int which, boolean isChecked) ->
instantPlacementSettingsMenuDialogCheckboxes[which] = isChecked)
.setPositiveButton(
R.string.done,
(DialogInterface dialogInterface, int which) -> applySettingsMenuDialogCheckboxes())
.setNegativeButton(
android.R.string.cancel,
(DialogInterface dialog, int which) -> resetSettingsMenuDialogCheckboxes())
.show();
}
/** Shows checkboxes to the user to facilitate toggling of depth-based effects. */
private void launchDepthSettingsMenuDialog() {
// Retrieves the current settings to show in the checkboxes.
resetSettingsMenuDialogCheckboxes();
// Shows the dialog to the user.
Resources resources = getResources();
if (session.isDepthModeSupported(Config.DepthMode.AUTOMATIC)) {
// With depth support, the user can select visualization options.
new AlertDialog.Builder(this)
.setTitle(R.string.options_title_with_depth)
.setMultiChoiceItems(
resources.getStringArray(R.array.depth_options_array),
depthSettingsMenuDialogCheckboxes,
(DialogInterface dialog, int which, boolean isChecked) ->
depthSettingsMenuDialogCheckboxes[which] = isChecked)
.setPositiveButton(
R.string.done,
(DialogInterface dialogInterface, int which) -> applySettingsMenuDialogCheckboxes())
.setNegativeButton(
android.R.string.cancel,
(DialogInterface dialog, int which) -> resetSettingsMenuDialogCheckboxes())
.show();
} else {
// Without depth support, no settings are available.
new AlertDialog.Builder(this)
.setTitle(R.string.options_title_without_depth)
.setPositiveButton(
R.string.done,
(DialogInterface dialogInterface, int which) -> applySettingsMenuDialogCheckboxes())
.show();
}
}
private void applySettingsMenuDialogCheckboxes() {
depthSettings.setUseDepthForOcclusion(depthSettingsMenuDialogCheckboxes[0]);
depthSettings.setDepthColorVisualizationEnabled(depthSettingsMenuDialogCheckboxes[1]);
instantPlacementSettings.setInstantPlacementEnabled(
instantPlacementSettingsMenuDialogCheckboxes[0]);
configureSession();
}
private void resetSettingsMenuDialogCheckboxes() {
depthSettingsMenuDialogCheckboxes[0] = depthSettings.useDepthForOcclusion();
depthSettingsMenuDialogCheckboxes[1] = depthSettings.depthColorVisualizationEnabled();
instantPlacementSettingsMenuDialogCheckboxes[0] =
instantPlacementSettings.isInstantPlacementEnabled();
}
/** Checks if we detected at least one plane. */
private boolean hasTrackingPlane() {
for (Plane plane : session.getAllTrackables(Plane.class)) {
if (plane.getTrackingState() == TrackingState.TRACKING) {
return true;
}
}
return false;
}
/** Update state based on the current frame's light estimation. */
private void updateLightEstimation(LightEstimate lightEstimate, float[] viewMatrix) {
if (lightEstimate.getState() != LightEstimate.State.VALID) {
virtualObjectShader.setBool("u_LightEstimateIsValid", false);
return;
}
virtualObjectShader.setBool("u_LightEstimateIsValid", true);
Matrix.invertM(viewInverseMatrix, 0, viewMatrix, 0);
virtualObjectShader.setMat4("u_ViewInverse", viewInverseMatrix);
updateMainLight(
lightEstimate.getEnvironmentalHdrMainLightDirection(),
lightEstimate.getEnvironmentalHdrMainLightIntensity(),
viewMatrix);
updateSphericalHarmonicsCoefficients(
lightEstimate.getEnvironmentalHdrAmbientSphericalHarmonics());
cubemapFilter.update(lightEstimate.acquireEnvironmentalHdrCubeMap());
}
private void updateMainLight(float[] direction, float[] intensity, float[] viewMatrix) {
// We need the direction in a vec4 with 0.0 as the final component to transform it to view space
worldLightDirection[0] = direction[0];
worldLightDirection[1] = direction[1];
worldLightDirection[2] = direction[2];
Matrix.multiplyMV(viewLightDirection, 0, viewMatrix, 0, worldLightDirection, 0);
virtualObjectShader.setVec4("u_ViewLightDirection", viewLightDirection);
virtualObjectShader.setVec3("u_LightIntensity", intensity);
}
private void updateSphericalHarmonicsCoefficients(float[] coefficients) {
// Pre-multiply the spherical harmonics coefficients before passing them to the shader. The
// constants in sphericalHarmonicFactors were derived from three terms:
//
// 1. The normalized spherical harmonics basis functions (y_lm)
//
// 2. The lambertian diffuse BRDF factor (1/pi)
//
// 3. A <cos> convolution. This is done to so that the resulting function outputs the irradiance
// of all incoming light over a hemisphere for a given surface normal, which is what the shader
// (environmental_hdr.frag) expects.
//
// You can read more details about the math here:
// https://google.github.io/filament/Filament.html#annex/sphericalharmonics
if (coefficients.length != 9 * 3) {
throw new IllegalArgumentException(
"The given coefficients array must be of length 27 (3 components per 9 coefficients");
}
// Apply each factor to every component of each coefficient
for (int i = 0; i < 9 * 3; ++i) {
sphericalHarmonicsCoefficients[i] = coefficients[i] * sphericalHarmonicFactors[i / 3];
}
virtualObjectShader.setVec3Array(
"u_SphericalHarmonicsCoefficients", sphericalHarmonicsCoefficients);
}
/** Configures the session with feature settings. */
private void configureSession() {
Config config = session.getConfig();
config.setLightEstimationMode(Config.LightEstimationMode.ENVIRONMENTAL_HDR);
if (session.isDepthModeSupported(Config.DepthMode.AUTOMATIC)) {
config.setDepthMode(Config.DepthMode.AUTOMATIC);
} else {
config.setDepthMode(Config.DepthMode.DISABLED);
}
if (instantPlacementSettings.isInstantPlacementEnabled()) {
config.setInstantPlacementMode(InstantPlacementMode.LOCAL_Y_UP);
} else {
config.setInstantPlacementMode(InstantPlacementMode.DISABLED);
}
session.configure(config);
}
}
/**
* Associates an Anchor with the trackable it was attached to. This is used to be able to check
* whether or not an Anchor originally was attached to an {@link InstantPlacementPoint}.
*/
class WrappedAnchor {
private Anchor anchor;
private Trackable trackable;
public WrappedAnchor(Anchor anchor, Trackable trackable) {
this.anchor = anchor;
this.trackable = trackable;
}
public Anchor getAnchor() {
return anchor;
}
public Trackable getTrackable() {
return trackable;
}
}
|
google/ExoPlayer | 36,661 | library/common/src/test/java/com/google/android/exoplayer2/source/ads/AdPlaybackStateTest.java | /*
* Copyright 2021 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.source.ads;
import static com.google.android.exoplayer2.source.ads.AdPlaybackState.AD_STATE_AVAILABLE;
import static com.google.android.exoplayer2.source.ads.AdPlaybackState.AD_STATE_ERROR;
import static com.google.android.exoplayer2.source.ads.AdPlaybackState.AD_STATE_PLAYED;
import static com.google.android.exoplayer2.source.ads.AdPlaybackState.AD_STATE_SKIPPED;
import static com.google.android.exoplayer2.source.ads.AdPlaybackState.AD_STATE_UNAVAILABLE;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import android.net.Uri;
import android.os.Bundle;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
/** Unit tests for {@link AdPlaybackState}. */
@RunWith(AndroidJUnit4.class)
public class AdPlaybackStateTest {

  /** Three ad group positions: a preroll at zero and two midrolls. */
  private static final long[] TEST_AD_GROUP_TIMES_US = new long[] {0, 5_000_000, 10_000_000};

  private static final Uri TEST_URI = Uri.parse("http://www.google.com");
  private static final Object TEST_ADS_ID = new Object();

  @Test
  public void setAdCount() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    assertThat(state.getAdGroup(1).count).isEqualTo(C.LENGTH_UNSET);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1);
    assertThat(state.getAdGroup(1).count).isEqualTo(1);
  }

  @Test
  public void setAdUriBeforeAdCount() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1, TEST_URI);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 2);
    // The URI set before the count must survive, and only that ad becomes available.
    assertThat(state.getAdGroup(1).uris[0]).isNull();
    assertThat(state.getAdGroup(1).states[0]).isEqualTo(AdPlaybackState.AD_STATE_UNAVAILABLE);
    assertThat(state.getAdGroup(1).uris[1]).isSameInstanceAs(TEST_URI);
    assertThat(state.getAdGroup(1).states[1]).isEqualTo(AdPlaybackState.AD_STATE_AVAILABLE);
  }

  @Test
  public void setAdErrorBeforeAdCount() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withAdLoadError(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 2);
    assertThat(state.getAdGroup(1).uris[0]).isNull();
    assertThat(state.getAdGroup(1).states[0]).isEqualTo(AdPlaybackState.AD_STATE_ERROR);
    assertThat(state.isAdInErrorState(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0)).isTrue();
    assertThat(state.getAdGroup(1).states[1]).isEqualTo(AdPlaybackState.AD_STATE_UNAVAILABLE);
    assertThat(state.isAdInErrorState(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1)).isFalse();
  }

  @Test
  public void withAdGroupTimeUs_updatesAdGroupTimeUs() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0, 5_000, 10_000)
            .withRemovedAdGroupCount(1);
    state =
        state
            .withAdGroupTimeUs(/* adGroupIndex= */ 1, 3_000)
            .withAdGroupTimeUs(/* adGroupIndex= */ 2, 6_000);
    assertThat(state.adGroupCount).isEqualTo(3);
    assertThat(state.getAdGroup(1).timeUs).isEqualTo(3_000);
    assertThat(state.getAdGroup(2).timeUs).isEqualTo(6_000);
  }

  @Test
  public void withNewAdGroup_addsGroupAndKeepsExistingGroups() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0, 3_000, 6_000)
            .withRemovedAdGroupCount(1)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 2)
            .withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
            .withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1, TEST_URI)
            .withSkippedAd(/* adGroupIndex= */ 2, /* adIndexInAdGroup= */ 0);
    state =
        state
            .withNewAdGroup(/* adGroupIndex= */ 1, /* adGroupTimeUs= */ 1_000)
            .withNewAdGroup(/* adGroupIndex= */ 3, /* adGroupTimeUs= */ 5_000)
            .withNewAdGroup(/* adGroupIndex= */ 5, /* adGroupTimeUs= */ 8_000);
    // Existing groups shift by the number of groups inserted before them and keep their data.
    assertThat(state.adGroupCount).isEqualTo(6);
    assertThat(state.getAdGroup(1).count).isEqualTo(C.INDEX_UNSET);
    assertThat(state.getAdGroup(2).count).isEqualTo(2);
    assertThat(state.getAdGroup(2).uris[1]).isSameInstanceAs(TEST_URI);
    assertThat(state.getAdGroup(3).count).isEqualTo(C.INDEX_UNSET);
    assertThat(state.getAdGroup(4).count).isEqualTo(1);
    assertThat(state.getAdGroup(4).states[0]).isEqualTo(AdPlaybackState.AD_STATE_SKIPPED);
    assertThat(state.getAdGroup(5).count).isEqualTo(C.INDEX_UNSET);
  }

  @Test
  public void withAdDurationsUs_updatesAdDurations() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 0, 10_000)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 2)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 2)
            .withAdDurationsUs(new long[][] {new long[] {5_000, 6_000}, new long[] {7_000, 8_000}});
    state = state.withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs...= */ 1_000, 2_000);
    assertThat(state.getAdGroup(0).durationsUs[0]).isEqualTo(5_000);
    assertThat(state.getAdGroup(0).durationsUs[1]).isEqualTo(6_000);
    assertThat(state.getAdGroup(1).durationsUs[0]).isEqualTo(1_000);
    assertThat(state.getAdGroup(1).durationsUs[1]).isEqualTo(2_000);
  }

  @Test
  public void getFirstAdIndexToPlayIsZero() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 3);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
    assertThat(state.getAdGroup(1).getFirstAdIndexToPlay()).isEqualTo(0);
  }

  @Test
  public void getFirstAdIndexToPlaySkipsPlayedAd() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 3);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
    state = state.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0);
    assertThat(state.getAdGroup(1).getFirstAdIndexToPlay()).isEqualTo(1);
    assertThat(state.getAdGroup(1).states[1]).isEqualTo(AdPlaybackState.AD_STATE_UNAVAILABLE);
    assertThat(state.getAdGroup(1).states[2]).isEqualTo(AdPlaybackState.AD_STATE_AVAILABLE);
  }

  @Test
  public void getFirstAdIndexToPlaySkipsSkippedAd() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 3);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
    state = state.withSkippedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0);
    assertThat(state.getAdGroup(1).getFirstAdIndexToPlay()).isEqualTo(1);
    assertThat(state.getAdGroup(1).states[1]).isEqualTo(AdPlaybackState.AD_STATE_UNAVAILABLE);
    assertThat(state.getAdGroup(1).states[2]).isEqualTo(AdPlaybackState.AD_STATE_AVAILABLE);
  }

  @Test
  public void getFirstAdIndexToPlaySkipsErrorAds() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 3);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
    state = state.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0);
    state = state.withAdLoadError(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1);
    assertThat(state.getAdGroup(1).getFirstAdIndexToPlay()).isEqualTo(2);
  }

  @Test
  public void getNextAdIndexToPlaySkipsErrorAds() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 3);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1, TEST_URI);
    state = state.withAdLoadError(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1);
    assertThat(state.getAdGroup(1).getNextAdIndexToPlay(0)).isEqualTo(2);
  }

  @Test
  public void getFirstAdIndexToPlay_withPlayedServerSideInsertedAds_returnsFirstIndex() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 3);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
    state = state.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0);
    // Server-side inserted groups always start from the first ad, even if it was played.
    assertThat(state.getAdGroup(1).getFirstAdIndexToPlay()).isEqualTo(0);
  }

  @Test
  public void getNextAdIndexToPlay_withPlayedServerSideInsertedAds_returnsNextIndex() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US).withRemovedAdGroupCount(1);
    state = state.withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 3);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
    state = state.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0);
    state = state.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1);
    state = state.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2);
    assertThat(state.getAdGroup(1).getNextAdIndexToPlay(/* lastPlayedAdIndex= */ 0)).isEqualTo(1);
    assertThat(state.getAdGroup(1).getNextAdIndexToPlay(/* lastPlayedAdIndex= */ 1)).isEqualTo(2);
  }

  @Test
  public void setAdStateTwiceThrows() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
            .withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0);
    // Setting the state of an already-played ad throws. assertThrows is used for consistency
    // with the other expected-exception tests in this class (see
    // withAvailableAd_forClientSideAdGroup_throwsRuntimeException).
    Assert.assertThrows(
        RuntimeException.class,
        () -> state.withAdLoadError(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0));
  }

  @Test
  public void withAvailableAd() {
    int adGroupIndex = 2;
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US)
            .withRemovedAdGroupCount(2)
            .withAdCount(adGroupIndex, 3)
            .withAdDurationsUs(adGroupIndex, /* adDurationsUs...*/ 10, 20, 30)
            .withIsServerSideInserted(adGroupIndex, true);
    state = state.withAvailableAd(adGroupIndex, /* adIndexInAdGroup= */ 2);
    assertThat(state.getAdGroup(adGroupIndex).states)
        .asList()
        .containsExactly(AD_STATE_UNAVAILABLE, AD_STATE_UNAVAILABLE, AD_STATE_AVAILABLE)
        .inOrder();
    assertThat(state.getAdGroup(adGroupIndex).uris)
        .asList()
        .containsExactly(null, null, Uri.EMPTY)
        .inOrder();
    // Marking an ad available a second time is a no-op.
    state =
        state
            .withAvailableAd(adGroupIndex, /* adIndexInAdGroup= */ 0)
            .withAvailableAd(adGroupIndex, /* adIndexInAdGroup= */ 1)
            .withAvailableAd(adGroupIndex, /* adIndexInAdGroup= */ 2);
    assertThat(state.getAdGroup(adGroupIndex).states)
        .asList()
        .containsExactly(AD_STATE_AVAILABLE, AD_STATE_AVAILABLE, AD_STATE_AVAILABLE)
        .inOrder();
  }

  @Test
  public void withAvailableAd_forClientSideAdGroup_throwsRuntimeException() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US)
            .withRemovedAdGroupCount(2)
            .withAdCount(/* adGroupIndex= */ 2, 3)
            .withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...*/ 10, 20, 30);
    Assert.assertThrows(
        IllegalStateException.class, () -> state.withAvailableAd(/* adGroupIndex= */ 2, 1));
  }

  @Test
  public void skipAllWithoutAdCount() {
    AdPlaybackState state = new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US);
    state = state.withSkippedAdGroup(0);
    state = state.withSkippedAdGroup(1);
    assertThat(state.getAdGroup(0).count).isEqualTo(0);
    assertThat(state.getAdGroup(1).count).isEqualTo(0);
  }

  @Test
  public void withResetAdGroup_beforeSetAdCount_doesNothing() {
    AdPlaybackState state = new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US);
    state = state.withResetAdGroup(/* adGroupIndex= */ 1);
    assertThat(state.getAdGroup(1).count).isEqualTo(C.LENGTH_UNSET);
  }

  @Test
  public void withOriginalAdCount() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 5_000_000)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 2);
    state = state.withOriginalAdCount(/* adGroupIndex= */ 0, /* originalAdCount= */ 3);
    assertThat(state.getAdGroup(0).count).isEqualTo(2);
    assertThat(state.getAdGroup(0).originalCount).isEqualTo(3);
  }

  @Test
  public void withOriginalAdCount_unsetValue_defaultsToIndexUnset() {
    AdPlaybackState state =
        new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 5_000_000)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 2);
    assertThat(state.getAdGroup(0).count).isEqualTo(2);
    assertThat(state.getAdGroup(0).originalCount).isEqualTo(C.INDEX_UNSET);
  }

  @Test
  public void withLastAdGroupRemoved() {
    AdPlaybackState state = new AdPlaybackState(TEST_ADS_ID, /* adGroupTimesUs...= */ 5_000_000);
    state =
        state
            .withAdCount(/* adGroupIndex= */ 0, 3)
            .withAdDurationsUs(/* adGroupIndex= */ 0, 10_000L, 20_000L, 30_000L)
            .withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0)
            .withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 1)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, true);
    state = state.withLastAdRemoved(0);
    // Removing the last ad shrinks states and durations from 3 entries to 2.
    assertThat(state.getAdGroup(/* adGroupIndex= */ 0).states).asList().hasSize(2);
    assertThat(state.getAdGroup(/* adGroupIndex= */ 0).durationsUs)
        .asList()
        .containsExactly(10_000L, 20_000L)
        .inOrder();
    assertThat(state.getAdGroup(/* adGroupIndex= */ 0).states)
        .asList()
        .containsExactly(AD_STATE_PLAYED, AD_STATE_PLAYED);
  }

  @Test
  public void withResetAdGroup_resetsAdsInFinalStates() {
    AdPlaybackState state = new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US);
    state = state.withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 5);
    state =
        state.withAdDurationsUs(
            /* adGroupIndex= */ 1, /* adDurationsUs...= */ 1_000L, 2_000L, 3_000L, 4_000L, 5_000L);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 1, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 3, TEST_URI);
    state = state.withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 4, TEST_URI);
    state = state.withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 2);
    state = state.withSkippedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 3);
    state = state.withAdLoadError(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 4);
    // Verify setup.
    assertThat(state.getAdGroup(/* adGroupIndex= */ 1).states)
        .asList()
        .containsExactly(
            AD_STATE_UNAVAILABLE,
            AD_STATE_AVAILABLE,
            AD_STATE_PLAYED,
            AD_STATE_SKIPPED,
            AD_STATE_ERROR)
        .inOrder();
    state = state.withResetAdGroup(/* adGroupIndex= */ 1);
    // Final states (played/skipped/error) return to available; URIs and durations are kept.
    assertThat(state.getAdGroup(/* adGroupIndex= */ 1).states)
        .asList()
        .containsExactly(
            AD_STATE_UNAVAILABLE,
            AD_STATE_AVAILABLE,
            AD_STATE_AVAILABLE,
            AD_STATE_AVAILABLE,
            AD_STATE_AVAILABLE)
        .inOrder();
    assertThat(state.getAdGroup(/* adGroupIndex= */ 1).uris)
        .asList()
        .containsExactly(null, TEST_URI, TEST_URI, TEST_URI, TEST_URI)
        .inOrder();
    assertThat(state.getAdGroup(/* adGroupIndex= */ 1).durationsUs)
        .asList()
        .containsExactly(1_000L, 2_000L, 3_000L, 4_000L, 5_000L);
  }

  @Test
  public void adPlaybackStateWithNoAds_checkValues() {
    AdPlaybackState adPlaybackStateWithNoAds = AdPlaybackState.NONE;
    // Please refrain from altering these values since doing so would cause issues with backwards
    // compatibility.
    assertThat(adPlaybackStateWithNoAds.adsId).isNull();
    assertThat(adPlaybackStateWithNoAds.adGroupCount).isEqualTo(0);
    assertThat(adPlaybackStateWithNoAds.adResumePositionUs).isEqualTo(0);
    assertThat(adPlaybackStateWithNoAds.contentDurationUs).isEqualTo(C.TIME_UNSET);
    assertThat(adPlaybackStateWithNoAds.removedAdGroupCount).isEqualTo(0);
  }

  @Test
  public void adPlaybackStateWithNoAds_toBundleSkipsDefaultValues_fromBundleRestoresThem() {
    AdPlaybackState adPlaybackStateWithNoAds = AdPlaybackState.NONE;
    Bundle adPlaybackStateWithNoAdsBundle = adPlaybackStateWithNoAds.toBundle();
    // Check that default values are skipped when bundling.
    assertThat(adPlaybackStateWithNoAdsBundle.keySet()).isEmpty();
    AdPlaybackState adPlaybackStateWithNoAdsFromBundle =
        AdPlaybackState.CREATOR.fromBundle(adPlaybackStateWithNoAdsBundle);
    assertThat(adPlaybackStateWithNoAdsFromBundle.adsId).isEqualTo(adPlaybackStateWithNoAds.adsId);
    assertThat(adPlaybackStateWithNoAdsFromBundle.adGroupCount)
        .isEqualTo(adPlaybackStateWithNoAds.adGroupCount);
    assertThat(adPlaybackStateWithNoAdsFromBundle.adResumePositionUs)
        .isEqualTo(adPlaybackStateWithNoAds.adResumePositionUs);
    assertThat(adPlaybackStateWithNoAdsFromBundle.contentDurationUs)
        .isEqualTo(adPlaybackStateWithNoAds.contentDurationUs);
    assertThat(adPlaybackStateWithNoAdsFromBundle.removedAdGroupCount)
        .isEqualTo(adPlaybackStateWithNoAds.removedAdGroupCount);
  }

  @Test
  public void createAdPlaybackState_roundTripViaBundle_yieldsEqualFieldsExceptAdsId() {
    AdPlaybackState originalState =
        new AdPlaybackState(TEST_ADS_ID, TEST_AD_GROUP_TIMES_US)
            .withRemovedAdGroupCount(1)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0)
            .withAvailableAdUri(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0, TEST_URI)
            .withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 2)
            .withSkippedAd(/* adGroupIndex= */ 2, /* adIndexInAdGroup= */ 0)
            .withPlayedAd(/* adGroupIndex= */ 2, /* adIndexInAdGroup= */ 1)
            .withAvailableAdUri(/* adGroupIndex= */ 2, /* adIndexInAdGroup= */ 0, TEST_URI)
            .withAvailableAdUri(/* adGroupIndex= */ 2, /* adIndexInAdGroup= */ 1, TEST_URI)
            .withContentResumeOffsetUs(/* adGroupIndex= */ 1, /* contentResumeOffsetUs= */ 4444)
            .withContentResumeOffsetUs(/* adGroupIndex= */ 2, /* contentResumeOffsetUs= */ 3333)
            .withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true)
            .withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
            .withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs...= */ 12)
            .withAdDurationsUs(/* adGroupIndex= */ 2, /* adDurationsUs...= */ 34, 56)
            .withAdResumePositionUs(123)
            .withContentDurationUs(456);
    AdPlaybackState restoredState = AdPlaybackState.CREATOR.fromBundle(originalState.toBundle());
    // adsId is not bundled (it may be any Object), so it is null after restoration.
    assertThat(restoredState.adsId).isNull();
    assertThat(restoredState.adGroupCount).isEqualTo(originalState.adGroupCount);
    for (int i = 0; i < restoredState.adGroupCount; i++) {
      assertThat(restoredState.getAdGroup(i)).isEqualTo(originalState.getAdGroup(i));
    }
    assertThat(restoredState.adResumePositionUs).isEqualTo(originalState.adResumePositionUs);
    assertThat(restoredState.contentDurationUs).isEqualTo(originalState.contentDurationUs);
  }

  @Test
  public void roundTripViaBundle_ofAdGroup_yieldsEqualInstance() {
    AdPlaybackState.AdGroup adGroup =
        new AdPlaybackState.AdGroup(/* timeUs= */ 42)
            .withAdCount(2)
            .withAdState(AD_STATE_AVAILABLE, /* index= */ 0)
            .withAdState(AD_STATE_PLAYED, /* index= */ 1)
            .withAdUri(Uri.parse("https://www.google.com"), /* index= */ 0)
            .withAdUri(Uri.EMPTY, /* index= */ 1)
            .withAdDurationsUs(new long[] {1234, 5678})
            .withContentResumeOffsetUs(4444)
            .withIsServerSideInserted(true);
    assertThat(AdPlaybackState.AdGroup.CREATOR.fromBundle(adGroup.toBundle())).isEqualTo(adGroup);
  }

  @Test
  public void withLivePostrollPlaceholderAppended_emptyAdPlaybackState_insertsPlaceholder() {
    AdPlaybackState adPlaybackState =
        new AdPlaybackState("adsId").withLivePostrollPlaceholderAppended();
    assertThat(adPlaybackState.adGroupCount).isEqualTo(1);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).timeUs)
        .isEqualTo(C.TIME_END_OF_SOURCE);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).count).isEqualTo(C.LENGTH_UNSET);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 0).isServerSideInserted).isTrue();
  }

  @Test
  public void withLivePostrollPlaceholderAppended_withExistingAdGroups_appendsPlaceholder() {
    AdPlaybackState adPlaybackState =
        new AdPlaybackState("state", /* adGroupTimesUs...= */ 0L, 10_000_000L)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, true)
            .withIsServerSideInserted(/* adGroupIndex= */ 1, true)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs...= */ 10_000_000L)
            .withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs...= */ 5_000_000L);
    adPlaybackState = adPlaybackState.withLivePostrollPlaceholderAppended();
    assertThat(adPlaybackState.adGroupCount).isEqualTo(3);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).timeUs)
        .isEqualTo(C.TIME_END_OF_SOURCE);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).count).isEqualTo(C.LENGTH_UNSET);
    assertThat(adPlaybackState.getAdGroup(/* adGroupIndex= */ 2).isServerSideInserted).isTrue();
  }

  @Test
  public void endsWithLivePostrollPlaceHolder_withExistingAdGroups_postrollDetected() {
    AdPlaybackState adPlaybackState =
        new AdPlaybackState("adsId", /* adGroupTimesUs...= */ 0L, 10_000_000L)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, true)
            .withIsServerSideInserted(/* adGroupIndex= */ 1, true)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withAdDurationsUs(/* adGroupIndex= */ 0, /* adDurationsUs...= */ 10_000_000L)
            .withAdDurationsUs(/* adGroupIndex= */ 1, /* adDurationsUs...= */ 5_000_000L);
    boolean endsWithLivePostrollPlaceHolder = adPlaybackState.endsWithLivePostrollPlaceHolder();
    assertThat(endsWithLivePostrollPlaceHolder).isFalse();
    adPlaybackState = adPlaybackState.withLivePostrollPlaceholderAppended();
    endsWithLivePostrollPlaceHolder = adPlaybackState.endsWithLivePostrollPlaceHolder();
    assertThat(endsWithLivePostrollPlaceHolder).isTrue();
  }

  @Test
  public void endsWithLivePostrollPlaceHolder_emptyAdPlaybackState_postrollNotDetected() {
    assertThat(AdPlaybackState.NONE.endsWithLivePostrollPlaceHolder()).isFalse();
    assertThat(new AdPlaybackState("adsId").endsWithLivePostrollPlaceHolder()).isFalse();
  }

  @Test
  public void
      getAdGroupIndexAfterPositionUs_withClientSideInsertedAds_returnsNextAdGroupWithUnplayedAds() {
    AdPlaybackState state =
        new AdPlaybackState(
                /* adsId= */ new Object(),
                /* adGroupTimesUs...= */ 0,
                1000,
                2000,
                3000,
                4000,
                C.TIME_END_OF_SOURCE)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 3, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 4, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 5, /* adCount= */ 1)
            .withPlayedAd(/* adGroupIndex= */ 1, /* adIndexInAdGroup= */ 0)
            .withPlayedAd(/* adGroupIndex= */ 3, /* adIndexInAdGroup= */ 0);
    // Played groups (1 and 3) are skipped when looking for the next group with unplayed ads.
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 0, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(/* positionUs= */ 0, /* periodDurationUs= */ 5000))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 1999, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 1999, /* periodDurationUs= */ 5000))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 2000, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(4);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 2000, /* periodDurationUs= */ 5000))
        .isEqualTo(4);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 3999, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(4);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 3999, /* periodDurationUs= */ 5000))
        .isEqualTo(4);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 4000, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(5);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 4000, /* periodDurationUs= */ 5000))
        .isEqualTo(5);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 4999, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(5);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 4999, /* periodDurationUs= */ 5000))
        .isEqualTo(5);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 5000, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(5);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 5000, /* periodDurationUs= */ 5000))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ 5000))
        .isEqualTo(C.INDEX_UNSET);
  }

  @Test
  public void getAdGroupIndexAfterPositionUs_withServerSideInsertedAds_returnsNextAdGroup() {
    AdPlaybackState state =
        new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 0, 1000, 2000)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true)
            .withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true)
            .withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 2, /* adCount= */ 1)
            .withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0)
            .withPlayedAd(/* adGroupIndex= */ 2, /* adIndexInAdGroup= */ 0);
    // Server-side inserted groups are returned even when already played.
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 0, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(/* positionUs= */ 0, /* periodDurationUs= */ 5000))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 999, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 999, /* periodDurationUs= */ 5000))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 1000, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 1000, /* periodDurationUs= */ 5000))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 1999, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 1999, /* periodDurationUs= */ 5000))
        .isEqualTo(2);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 2000, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 2000, /* periodDurationUs= */ 5000))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ 5000))
        .isEqualTo(C.INDEX_UNSET);
  }

  @Test
  public void
      getAdGroupIndexAfterPositionUs_withServerSidePostrollPlaceholderForLive_placeholderAsNextAdGroupIndex() {
    AdPlaybackState state =
        new AdPlaybackState(/* adsId= */ new Object(), /* adGroupTimesUs...= */ 2000)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
            .withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0)
            .withLivePostrollPlaceholderAppended();
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 1999, /* periodDurationUs= */ 5000))
        .isEqualTo(0);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 2000, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ 2000, /* periodDurationUs= */ 5000))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexAfterPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ 5000))
        .isEqualTo(C.INDEX_UNSET);
  }

  @Test
  public void
      getAdGroupIndexForPositionUs_withServerSidePostrollPlaceholderForLive_ignoresPlaceholder() {
    AdPlaybackState state =
        new AdPlaybackState("adsId", /* adGroupTimesUs...= */ 0L, 5_000_000L, C.TIME_END_OF_SOURCE)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true)
            .withIsServerSideInserted(/* adGroupIndex= */ 1, /* isServerSideInserted= */ true)
            .withIsServerSideInserted(/* adGroupIndex= */ 2, /* isServerSideInserted= */ true)
            .withAdCount(/* adGroupIndex= */ 0, /* adCount= */ 1)
            .withAdCount(/* adGroupIndex= */ 1, /* adCount= */ 1)
            .withPlayedAd(/* adGroupIndex= */ 0, /* adIndexInAdGroup= */ 0);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ 4_999_999L, /* periodDurationUs= */ 10_000_000L))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ 4_999_999L, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ 5_000_000L, /* periodDurationUs= */ 10_000_000L))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ 5_000_000L, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ 10_000_000L))
        .isEqualTo(1);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(1);
  }

  @Test
  public void
      getAdGroupIndexForPositionUs_withOnlyServerSidePostrollPlaceholderForLive_ignoresPlaceholder() {
    AdPlaybackState state =
        new AdPlaybackState("adsId", /* adGroupTimesUs...= */ C.TIME_END_OF_SOURCE)
            .withIsServerSideInserted(/* adGroupIndex= */ 0, /* isServerSideInserted= */ true);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ 5_000_000L, /* periodDurationUs= */ 10_000_000L))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ 5_000_000L, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ 10_000_001L, /* periodDurationUs= */ 10_000_000L))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ 10_000_000L))
        .isEqualTo(C.INDEX_UNSET);
    assertThat(
            state.getAdGroupIndexForPositionUs(
                /* positionUs= */ C.TIME_END_OF_SOURCE, /* periodDurationUs= */ C.TIME_UNSET))
        .isEqualTo(C.INDEX_UNSET);
  }
}
|
apache/derby | 34,100 | java/org.apache.derby.tools/org/apache/derby/tools/dblook.java | /*
Derby - Class org.apache.derby.tools.dblook
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.tools;
import java.io.BufferedReader;
import java.io.StringReader;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.Statement;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.StringTokenizer;
import java.util.ArrayList;
import org.apache.derby.iapi.tools.i18n.LocalizedResource;
import org.apache.derby.impl.tools.dblook.DB_Check;
import org.apache.derby.impl.tools.dblook.DB_Index;
import org.apache.derby.impl.tools.dblook.DB_Jar;
import org.apache.derby.impl.tools.dblook.DB_Key;
import org.apache.derby.impl.tools.dblook.DB_Table;
import org.apache.derby.impl.tools.dblook.DB_Schema;
import org.apache.derby.impl.tools.dblook.DB_Sequence;
import org.apache.derby.impl.tools.dblook.DB_Alias;
import org.apache.derby.impl.tools.dblook.DB_Trigger;
import org.apache.derby.impl.tools.dblook.DB_View;
import org.apache.derby.impl.tools.dblook.DB_Roles;
import org.apache.derby.impl.tools.dblook.DB_GrantRevoke;
import org.apache.derby.impl.tools.dblook.Logs;
public final class dblook {
// DB2 enforces a maximum of 30 tables to be specified as part of
// the table list; dblook mirrors that limit.
private static final int DB2_MAX_NUMBER_OF_TABLES = 30;
// Connection to the source database; opened in go() and closed in its finally block.
private Connection conn;
// Reusable lookup of a column name by (table id, column number); prepared in prepForDump().
private static PreparedStatement getColNameFromNumberQuery;
// Mappings from id to name for schemas and tables (for ease
// of reference); populated in prepForDump().
private static HashMap<String,String> schemaMap;
private static HashMap<String,String> tableIdToNameMap;
// Command-line Parameters (populated by parseArgs/loadParam).
private static String sourceDBUrl;       // -d: JDBC url of the source database (required)
private static String ddlFileName;       // -o: output file for generated DDL (null = stdout)
private static String stmtDelimiter;     // -td: statement delimiter (defaults to ";")
private static boolean appendLogs;       // -append: append to output/log instead of overwriting
private static ArrayList<String> tableList;  // -t: quoted table names to restrict output to
private static String schemaParam;       // -z: schema restriction, as given on the command line
private static String targetSchema;      // -z: normalized (quoted) form of schemaParam
private static boolean skipViews;        // -noview: suppress CREATE VIEW output
private static boolean verbose;          // -verbose: echo extra info to System.err
private static String sourceDBName;      // database name parsed out of sourceDBUrl
private static String lookLogName = "dblook.log";
private static LocalizedResource langUtil;   // localized message lookup
private static boolean sqlAuthorization;     // true if derby.database.sqlAuthorization is on
/* ************************************************
 * main:
 * Entry point. Constructs a dblook instance, which parses the
 * command line and runs the whole DDL generation via its constructor.
 * @param args command-line arguments; args must include "-d <dbUrl>".
 * ****/
public static void main(String[] args) {
try {
new dblook(args);
} catch (Exception e) {
// All "normal" errors are logged and printed to
// console according to command line arguments,
// so if we get here, something unexpected must
// have happened; print to error stream.
e.printStackTrace();
}
}
/* ************************************************
 * Constructor:
 * Parse the command line, initialize logs, echo program variables,
 * load the Derby driver, and then run the DDL generation (go()).
 * Returns early (after printing usage) if the arguments are invalid,
 * or (after flushing logs) if the driver cannot be loaded.
 * @param args Array of dblook command-line arguments.
 * ****/
public dblook(String[] args) throws Exception {
// Adjust the application in accordance with derby.ui.locale
// and derby.ui.codeset
langUtil = LocalizedResource.getInstance();
// Initialize class variables.
initState();
// Parse the command line.
if (!parseArgs(args)) {
System.out.println(lookupMessage("DBLOOK_Usage"));
return;
}
showVariables();
if (!loadDriver()) {
// Failed when loading the driver. We already logged
// the exception, so just return.
Logs.cleanup(); // Make sure the error log is flushed to disk.
return;
}
// Id-to-name maps are filled in later by prepForDump().
schemaMap = new HashMap<String,String>();
tableIdToNameMap = new HashMap<String,String>();
// Now run the utility.
go();
}
/* ************************************************
 * initState:
 * Reset every command-line-driven field to its default so each
 * run of the utility starts from a clean slate.
 ****/
private void initState() {
    // String-valued parameters.
    sourceDBUrl = null;
    sourceDBName = null;
    ddlFileName = null;
    stmtDelimiter = null;
    schemaParam = null;
    targetSchema = null;
    // Flags and collections.
    appendLogs = false;
    skipViews = false;
    verbose = false;
    tableList = null;
}
/* ************************************************
 * parseArgs:
 * Parse the command-line arguments into the static parameter
 * fields, then perform follow-up setup: initialize the logs,
 * extract the database name from the url, and normalize the
 * schema restriction (-z) into quoted form.
 * @param args full argument array; must contain at least "-d <dbUrl>".
 * @return true if all parameters were loaded and the output
 *  files were successfully created; false otherwise.
 ****/
private boolean parseArgs(String[] args) {
if (args.length < 2)
// must have minimum of 2 args: "-d" and "<dbUrl>".
return false;
int st = 0;
// loadParam consumes one flag (plus its values) per call and
// returns the index of the last argument it processed.
for (int i = 0; i < args.length; i++) {
st = loadParam(args, i);
if (st == -1)
// malformed flag/value pair.
return false;
i = st;
}
if (sourceDBUrl == null) {
// must have at least a database url.
return false;
}
// At this point, all parameters should have been read into
// their respective class variables. Use those
// variables for some further processing.
// Setup logs.
boolean okay = Logs.initLogs(lookLogName, ddlFileName, appendLogs,
verbose, (stmtDelimiter == null ? ";" : stmtDelimiter));
// Get database name.
sourceDBName = extractDBNameFromUrl(sourceDBUrl);
// Set up schema restriction: an unquoted -z value is uppercased
// (SQL identifier semantics) before quoting; a quoted value is
// taken literally. A null schemaParam flows through the helpers
// and leaves targetSchema null (no restriction).
if ((schemaParam != null) && (schemaParam.length() > 0) &&
(schemaParam.charAt(0) != '"'))
// not quoted, so upper case, then add quotes.
{
targetSchema = addQuotes(expandDoubleQuotes(
schemaParam.toUpperCase(java.util.Locale.ENGLISH)));
}
else
targetSchema = addQuotes(expandDoubleQuotes(stripQuotes(schemaParam)));
return okay;
}
/* ************************************************
 * loadParam:
 * Read in one flag and its corresponding value(s) from the
 * list of command line arguments, starting at the start'th
 * argument. Recognized flags: -d <url>, -z <schema>,
 * -t <table...>, -td <delim>, -o <file>, -append, -noview,
 * -verbose. Values are stored into the static parameter fields.
 * @return The position of the argument that was most recently
 *  processed, or -1 if the flag was malformed or missing a value.
 ****/
private int loadParam(String [] args, int start) {
if ((args[start].length() == 0) || args[start].charAt(0) != '-')
// starting argument should be a flag; if it's
// not, ignore it.
return start;
// NOTE(review): a bare "-" argument passes the check above but has
// no charAt(1); that would throw StringIndexOutOfBoundsException here.
boolean haveVal = (args.length > start + 1);
switch (args[start].charAt(1)) {
case 'd':
// -d <dbUrl>: source database url (required overall).
if (!haveVal)
return -1;
if (args[start].length() == 2) {
sourceDBUrl = stripQuotes(args[++start]);
return start;
}
return -1;
case 'z':
// -z <schema>: restrict output to one schema.
if (!haveVal)
return -1;
if (args[start].length() == 2) {
schemaParam = args[++start];
return start;
}
return -1;
case 't':
// Either -td <delimiter> or -t <table list>.
if (!haveVal)
return -1;
if (args[start].equals("-td")) {
stmtDelimiter = args[++start];
return start;
}
else if (args[start].equals("-t"))
// list of tables.
return extractTableNamesFromList(args, start+1);
return -1;
case 'o':
// -o <file>: DDL output file; value must be non-empty.
if (!haveVal)
return -1;
if ((args[start].length() == 2) && (args[start+1].length() > 0)) {
ddlFileName = args[++start];
return start;
}
return -1;
case 'a':
// -append: boolean flag, no value.
if (args[start].equals("-append")) {
appendLogs = true;
return start;
}
return -1;
case 'n':
// -noview: boolean flag, no value.
if (args[start].equals("-noview")) {
skipViews = true;
return start;
}
return -1;
case 'v':
// -verbose: boolean flag, no value.
if (args[start].equals("-verbose")) {
verbose = true;
return start;
}
return -1;
default:
// unrecognized flag.
return -1;
}
}
/* ************************************************
 * loadDriver:
 * Ensure a JDBC driver suitable for sourceDBUrl is loaded and
 * registered. The driver class can be forced with the "driver"
 * system property; otherwise it is inferred from the url prefix.
 * @param precondition sourceDBUrl has been loaded.
 * @return false (after logging) if anything goes wrong; true otherwise.
 ****/
private boolean loadDriver() {
    String driverClass = System.getProperty("driver");
    if (driverClass == null) {
        // No explicit override; pick a driver based on the url form.
        if (sourceDBUrl.indexOf(":net://") != -1)
            driverClass = "com.ibm.db2.jcc.DB2Driver";
        else if (sourceDBUrl.startsWith("jdbc:derby://"))
            driverClass = "org.apache.derby.jdbc.ClientDriver";
        else
            driverClass = "org.apache.derby.jdbc.EmbeddedDriver";
    }
    try {
        Class<?> klass = Class.forName(driverClass);
        if (!Driver.class.isAssignableFrom(klass)) {
            // The named class is not actually a JDBC driver.
            Logs.debug(
                "TL_notInstanceOf",
                new String[] { driverClass, Driver.class.getName() });
            return false;
        }
        // Instantiating the driver registers it with DriverManager.
        klass.getConstructor().newInstance();
        return true;
    }
    catch (Exception e)
    {
        Logs.debug(e);
        return false;
    }
}
/* ************************************************
 * extractDBNameFromUrl:
 * Given a database url, parse out the actual name
 * of the database. This is required for creation
 * the DB2JJARS directory (the database name is part
 * of the path to the jar).
 * Handles both embedded urls (jdbc:derby:<dbname>[;attrs]) and
 * network urls (...://host:port/<dbname>[;attrs]), with optional
 * double quotes around the name and Windows drive-letter colons.
 * @param dbUrl The database url from which to extract the
 *  the database name.
 * @return the name of the database (including its
 *  path, if provided) that is referenced by the url; "" if the
 *  url is null or not a derby url (connect will fail later anyway).
 ****/
private String extractDBNameFromUrl(String dbUrl) {
if (dbUrl == null)
// shouldn't happen; ignore it here, as an error
// will be thrown we try to connect.
return "";
int start = dbUrl.indexOf("jdbc:derby:");
if (start == -1)
// not a valid url; just ignore it (an error
// will be thrown when we try to connect).
return "";
start = dbUrl.indexOf("://");
if (start == -1)
// standard url (jdbc:derby:<dbname>). Database
// name starts right after "derby:". The "6" in
// the following line is the length of "derby:".
start = dbUrl.indexOf("derby:") + 6;
else
// Network Server url. Database name starts right
// after next slash (":net://hostname:port/<dbname>).
// The "3" in the following line is the length of
// "://".
start = dbUrl.indexOf("/", start+3) + 1;
// NOTE(review): if the url ends right where the name should begin
// (e.g. "jdbc:derby:"), charAt(start) below would throw
// StringIndexOutOfBoundsException — TODO confirm callers never pass that.
int stop = -1;
if (dbUrl.charAt(start) == '"') {
// database name is quoted; end of the name is the
// closing quote.
start++;
stop = dbUrl.indexOf("\"", start);
}
else {
// Database name ends with the start of a list of connection
// attributes. This list can begin with either a colon
// or a semi-colon.
stop = dbUrl.indexOf(":", start);
if (stop != -1) {
if ((dbUrl.charAt(stop+1) == '/') ||
(dbUrl.charAt(stop+1) == '\\'))
// then this colon is part of the path (ex. "C:"),
// so ignore it.
stop = dbUrl.indexOf(":", stop+2);
}
int stop2 = dbUrl.length();
if (stop == -1)
// no colons; see if we can find a semi-colon.
stop = dbUrl.indexOf(";", start);
else
stop2 = dbUrl.indexOf(";", start);
// take whichever terminator comes first.
stop = (stop <= stop2 ? stop : stop2);
}
if (stop == -1)
// we have a url that ends with database name (no
// other attributes appended).
stop = dbUrl.length();
return dbUrl.substring(start, stop);
}
/* ************************************************
 * extractTableNamesFromList:
 * Given an array of command line arguments containing
 * a list of table names beginning at start'th position,
 * read the list of table names and store them as
 * our target table list. Names without quotes are
 * turned into ALL CAPS and then double quotes are
 * added; names which already have double quotes are
 * stored exactly as they are. NOTE: DB2 enforces
 * maximum of 30 tables, and ignores the rest; so
 * do we.
 * @param args Array of command line arguments.
 * @start Position of the start of the list of tables
 *  with the args array.
 * @return The position of the last table name in
 *  the list of table names.
 ****/
private int extractTableNamesFromList(String [] args,
int start)
{
int argIndex = start;
int count = 0;
tableList = new ArrayList<String>();
while (argIndex < args.length) {
// Stop at the next flag ("-...") or once the DB2 limit is hit.
if (((args[argIndex].length() > 0) && (args[argIndex].charAt(0) == '-')) ||
(++count > DB2_MAX_NUMBER_OF_TABLES))
// we're done with the table list.
break;
if ((args[argIndex].length() > 0) && (args[argIndex].charAt(0) == '"'))
// it's quoted.
tableList.add(addQuotes(expandDoubleQuotes(
stripQuotes(args[argIndex++]))));
else
// not quoted, so make it all caps, then add
// quotes.
tableList.add(addQuotes(
expandDoubleQuotes(args[argIndex++].toUpperCase(
java.util.Locale.ENGLISH))));
}
// An empty list means "no restriction" downstream, so use null.
if (tableList.size() == 0)
tableList = null;
// Return the index of the last consumed argument.
return argIndex - 1;
}
/* ************************************************
 * showVariables:
 * Echo the primary run parameters to the report log so the
 * user can see what was specified.
 ****/
private void showVariables() {
    if (ddlFileName != null) {
        Logs.reportString("============================\n");
        Logs.reportMessage("DBLOOK_FileCreation");
        if (verbose) {
            writeVerboseOutput("DBLOOK_OutputLocation",
                ddlFileName);
        }
    }
    // Timestamp of this run, then the basic connection facts.
    String now = new Timestamp(System.currentTimeMillis()).toString();
    Logs.reportMessage("DBLOOK_Timestamp", now);
    Logs.reportMessage("DBLOOK_DBName", sourceDBName);
    Logs.reportMessage("DBLOOK_DBUrl", sourceDBUrl);
    // Optional restrictions.
    if (tableList != null) {
        Logs.reportMessage("DBLOOK_TargetTables");
    }
    if (schemaParam != null) {
        Logs.reportMessage("DBLOOK_TargetSchema", stripQuotes(schemaParam));
    }
    Logs.reportString("appendLogs: " + appendLogs + "\n");
}
/* ************************************************
 * go:
 * Connect to the source database, prepare statements,
 * and load a list of table id-to-name mappings. Then,
 * generate the DDL for the various objects in the
 * database by making calls to static methods of helper
 * classes (one helper class for each type of database
 * object). If a particular object type should not be
 * generated (because of the user-specified command-
 * line), then we enforce that here.
 * @precondition all user-specified parameters have
 * been loaded.
 * @return DDL for the source database has been
 * generated and printed to output, subject to
 * user-specified restrictions.
 * ****/
private void go()
throws Exception
{
try
{
// Connect to the database, prepare statements,
// and load id-to-name mappings.
this.conn = DriverManager.getConnection(sourceDBUrl);
prepForDump();
// Feature gates: some object types only exist at or after
// certain data-dictionary versions.
boolean at10_6 = atVersion( conn, 10, 6 );
boolean at10_9 = atVersion( conn, 10, 9 );
boolean at10_11 = atVersion(conn, 10, 11);
// Generate DDL.
// Start with schemas, since we might need them to
// exist for jars to load properly.
DB_Schema.doSchemas(this.conn,
(tableList != null) && (targetSchema == null));
if ( at10_6 ) { DB_Sequence.doSequences( conn ); }
if (tableList == null) {
// Don't do these if user just wants table-related objects.
DB_Jar.doJars(sourceDBName, this.conn, at10_9);
DB_Alias.doPFAU(this.conn, at10_6 );
}
// Table-related objects, in dependency order.
DB_Table.doTables(this.conn, tableIdToNameMap);
DB_Index.doIndexes(this.conn);
DB_Alias.doSynonyms(this.conn);
DB_Key.doKeys(this.conn);
DB_Check.doChecks(this.conn);
if (!skipViews)
DB_View.doViews(this.conn);
DB_Trigger.doTriggers(this.conn, at10_11);
DB_Roles.doRoles(this.conn);
DB_GrantRevoke.doAuthorizations(this.conn, at10_6);
// That's it; we're done.
if (getColNameFromNumberQuery != null)
getColNameFromNumberQuery.close();
Logs.cleanup();
}
catch (SQLException sqlE)
{
// Log both the exception and its full SQLException chain.
Logs.debug(sqlE);
Logs.debug(Logs.unRollExceptions(sqlE), (String)null);
Logs.cleanup();
return;
}
catch (Exception e)
{
Logs.debug(e);
Logs.cleanup();
return;
}
finally {
// Close our connection. Commit first because autocommit was
// disabled in prepForDump().
if (conn != null) {
conn.commit();
conn.close();
}
}
}
/* ************************************************
 * prepForDump:
 * Prepare any useful statements (i.e. statements that
 * are required by more than one helper class) and load
 * the id-to-name mappings for the source database.
 * Also detects whether derby.database.sqlAuthorization is
 * enabled, so authorization statements can be generated.
 ****/
private void prepForDump() throws Exception {
// We're only SELECTing throughout all of this, so no need
// to commit (plus, disabling commit makes it easier to
// have multiple ResultSets open on the same connection).
this.conn.setAutoCommit(false);
// Set the system schema to ensure that UCS_BASIC collation is used.
Statement stmt = conn.createStatement();
stmt.executeUpdate("SET SCHEMA SYS");
// Prepare statements.
getColNameFromNumberQuery = conn.prepareStatement(
"SELECT COLUMNNAME FROM SYS.SYSCOLUMNS WHERE " +
"REFERENCEID = ? AND COLUMNNUMBER = ?");
// Load list of user tables and table ids, for general use.
// Only user tables (TABLETYPE = 'T') are mapped.
ResultSet rs = stmt.executeQuery("SELECT T.TABLEID, T.TABLENAME, " +
"S.SCHEMANAME FROM SYS.SYSTABLES T, SYS.SYSSCHEMAS S " +
"WHERE T.TABLETYPE = 'T' AND T.SCHEMAID = S.SCHEMAID");
while (rs.next()) {
// Map id -> "schema"."table", both parts quoted.
String tableName = addQuotes(expandDoubleQuotes(rs.getString(2)));
String schemaName = addQuotes(expandDoubleQuotes(rs.getString(3)));
tableIdToNameMap.put(rs.getString(1),
schemaName + "." + tableName);
}
// Load schema id's and names.
rs = stmt.executeQuery("SELECT SCHEMAID, SCHEMANAME FROM " +
"SYS.SYSSCHEMAS");
while (rs.next()) {
schemaMap.put(rs.getString(1),
addQuotes(expandDoubleQuotes(rs.getString(2))));
}
// Check if sqlAuthorization mode is on. If so, need to generate
// authorization statements.
rs = stmt.executeQuery("VALUES SYSCS_UTIL.SYSCS_GET_DATABASE_PROPERTY" +
"('derby.database.sqlAuthorization')");
if (rs.next())
{
String sqlAuth = rs.getString(1);
if (Boolean.valueOf(sqlAuth).booleanValue())
sqlAuthorization = true;
}
// Closing the statement also releases its open result sets.
stmt.close();
// Load default property values.
return;
}
/* ************************************************
 * getColumnListFromDescription:
 * Takes string description of column numbers in the
 * form of "(2, 1, 3...)" and the id of the table
 * having those columns, and then returns a string
 * with the column numbers replaced by their actual
 * names ('2' is replaced with the 2nd column in the
 * table, '1' with the first column, etc.).
 * "ASC"/"DESC" direction tags (in any case) are passed through.
 * @param tableId the id of the table to which the column
 *  numbers should be applied.
 * @param description a string holding a list of column
 *  numbers, enclosed in parentheses and separated
 *  by commas.
 * @return a new string with the column numbers in
 *  'description' replaced by their column names;
 *  also, the parentheses have been stripped off.
 ****/
public static String getColumnListFromDescription(String tableId,
    String description) throws SQLException
{
    StringBuffer sb = new StringBuffer();
    // Tokenize the text between the outermost parentheses, keeping
    // the separators so commas can drive the "firstCol" state.
    StringTokenizer tokenizer = new StringTokenizer(
        description.substring(description.indexOf("(") + 1,
            description.lastIndexOf(")")), " ,", true);
    boolean firstCol = true;
    while (tokenizer.hasMoreTokens()) {
        String tok = tokenizer.nextToken().trim();
        if (tok.equals(""))
            continue;
        else if (tok.equals(",")) {
            firstCol = false;
            continue;
        }
        try {
            String colName = getColNameFromNumber(tableId,
                (Integer.parseInt(tok)));
            if (!firstCol)
                sb.append(", ");
            sb.append(colName);
        } catch (NumberFormatException e) {
            // not a number; could be "ASC" or "DESC" tag,
            // which is okay; otherwise, something's wrong.
            // Use ENGLISH for locale-independent uppercasing, matching
            // the convention used elsewhere in this file.
            tok = tok.toUpperCase(java.util.Locale.ENGLISH);
            if (tok.equals("DESC") || tok.equals("ASC"))
                // then this is okay; just add the token to result.
                sb.append(" " + tok);
            else
                // shouldn't happen.
                Logs.debug("INTERNAL ERROR: read a non-number (" +
                    tok + ") when a column number was expected:\n" +
                    description, (String)null);
        }
    }
    return sb.toString();
}
/* ************************************************
 * getColNameFromNumber:
 * Takes a tableid and a column number colNum, and
 * returns the name of the colNum'th column in the
 * table with tableid, using the prepared statement
 * created in prepForDump().
 * @param tableId id of the table.
 * @param colNum number of the column for which we want
 *  the name.
 * @return The quoted name of the colNum'th column in the
 *  table with tableId; "" (after logging) if the lookup
 *  unexpectedly finds no row.
 ****/
public static String getColNameFromNumber(String tableId,
int colNum) throws SQLException
{
getColNameFromNumberQuery.setString(1, tableId);
getColNameFromNumberQuery.setInt(2, colNum);
ResultSet rs = getColNameFromNumberQuery.executeQuery();
if (!rs.next()) {
// shouldn't happen.
Logs.debug("INTERNAL ERROR: Failed column number " +
"lookup for table " + lookupTableId(tableId) +
", column " + colNum, (String)null);
rs.close();
return "";
}
else {
// Quote the name (doubling any embedded quotes first).
String colName = addQuotes(expandDoubleQuotes(rs.getString(1)));
rs.close();
return colName;
}
}
/* ************************************************
 * addQuotes:
 * Surround the received object name with double quotes.
 * @param name the name to quote; may be null.
 * @return the name wrapped in double quotes, or null if the
 *  received name was null.
 ****/
public static String addQuotes(String name) {
    return (name == null) ? null : "\"" + name + "\"";
}
/** Surround the received name with single quotes; null passes through. */
public static String addSingleQuotes(String name) {
    return (name == null) ? null : "'" + name + "'";
}
/* ************************************************
 * stripQuotes:
 * Remove the enclosing quotes from a name, if present. Assumes
 * the received String either has no quotes, or has a quote
 * (double or single) as the very first AND very last character.
 * @param quotedName a name with quotes as the first
 *  and last character, or else with no quotes at all.
 * @return quotedName without the enclosing quotes; unchanged if
 *  it is not quoted at both ends; null if it was null.
 ****/
public static String stripQuotes(String quotedName) {
    if (quotedName == null)
        return null;
    boolean quotedAtStart =
        quotedName.startsWith("'") || quotedName.startsWith("\"");
    boolean quotedAtEnd =
        quotedName.endsWith("'") || quotedName.endsWith("\"");
    if (quotedAtStart && quotedAtEnd) {
        // Drop the first and last characters.
        return quotedName.substring(1, quotedName.length() - 1);
    }
    return quotedName;
}
/* ************************************************
 * isExcludedTable:
 * Decide whether DDL for objects related to the received table
 * should be skipped.
 * @param tableName name of the table to check (possibly
 *  schema-qualified).
 * @return true if 1) the user specified a table list
 *  and that list does NOT include the received name; or
 *  2) the received name's schema is ignorable (system schema
 *  or outside the user's schema restriction); false otherwise.
 ****/
public static boolean isExcludedTable(String tableName) {
    if (tableName == null)
        return true;
    int dot = tableName.indexOf('.');
    if (dot != -1) {
        // Split off the schema part and check it first.
        String schemaPart = tableName.substring(0, dot);
        if (isIgnorableSchema(schemaPart))
            // then we exclude this table.
            return true;
        // Continue the check with the bare table name.
        tableName = tableName.substring(dot + 1);
    }
    return ((tableList != null) && !tableList.contains(tableName));
}
/* ************************************************
 * isIgnorableSchema:
 * Decide whether DDL for objects in the received schema should
 * be skipped.
 * @param schemaName schema name to be checked (possibly quoted).
 * @return true if 1) the user specified a target
 *  schema and that target is NOT the same as the
 *  received schema name, or 2) the schema is one of the
 *  known system schemas listed below; false otherwise.
 ****/
private static final String[] ignorableSchemaNames = {
    "SYSIBM",
    "SYS",
    "SYSVISUAL",
    "SYSCAT",
    "SYSFUN",
    "SYSPROC",
    "SYSSTAT",
    "NULLID",
    "SYSCS_ADMIN",
    "SYSCS_DIAG",
    "SYSCS_UTIL",
    "SQLJ"};

public static boolean isIgnorableSchema(String schemaName) {
    // A schema restriction excludes everything that isn't the target.
    if ((targetSchema != null) && !schemaName.equals(targetSchema))
        return true;
    // Compare the unquoted name against the system-schema list,
    // case-insensitively.
    String bareName = stripQuotes(schemaName);
    for (String sysSchema : ignorableSchemaNames) {
        if (sysSchema.equalsIgnoreCase(bareName))
            return true;
    }
    return false;
}
/* ************************************************
 * stringContainsTargetTable:
 * Takes a string and determines whether or not that
 * string makes reference to any of the table names
 * in the user-specified table list. An all-caps table name
 * matches case-insensitively, except where the occurrence in
 * the string is itself double-quoted (then case must match).
 * A match that is part of a larger identifier does not count.
 * @param str The string in which to search for table names.
 * @return true if 1) the user didn't specify a
 *  target table list, or 2) the received string
 *  contains at least one of the table names in the
 *  user-specified target list; false otherwise.
 ****/
public static boolean stringContainsTargetTable(String str) {
    if (str == null)
        // if the string is null, it can't possibly contain
        // any table names.
        return false;
    if (tableList == null)
        // if we have no target tables, then default to true.
        return true;
    int strLen = str.length();
    for (int i = 0; i < tableList.size(); i++) {
        String tableName = tableList.get(i);
        tableName = expandDoubleQuotes(stripQuotes(tableName));
        int nameLen = tableName.length();
        String strCopy;
        if (tableName.equals(tableName.toUpperCase(
                java.util.Locale.ENGLISH)))
            // case doesn't matter: search an uppercased copy.
            // Use ENGLISH to match the uppercasing applied to the
            // table names themselves (was default-locale before).
            strCopy = str.toUpperCase(java.util.Locale.ENGLISH);
        else
            strCopy = str;
        int pos = strCopy.indexOf(tableName);
        while (pos != -1) {
            // If we found it, make sure it's really a match.
            // First, see if it's part of another word.
            if (!partOfWord(str, pos, nameLen, strLen)) {
                // See if the match is in quotes--if so, then
                // it should match the table name's case.
                if ((pos >= 1) && (strCopy.charAt(pos-1) == '"') &&
                        (pos + nameLen < strCopy.length()) &&
                        (strCopy.charAt(pos+nameLen) == '"'))
                { // match is quoted; check its case against the original.
                    if (str.substring(pos,
                            pos + nameLen).equals(tableName))
                        // everything checks out.
                        return true;
                }
                else
                    // match isn't quoted, so we're okay as is.
                    return true;
            }
            // BUGFIX: continue the scan in strCopy (the possibly-uppercased
            // copy), not in str — searching str here missed later
            // case-insensitive occurrences after a rejected candidate.
            pos = strCopy.indexOf(tableName, pos + nameLen);
        }
    }
    // If we get here, we didn't find it.
    return false;
}
/* ************************************************
 * partOfWord:
 * Reports whether str.substring(pos, pos + nameLen) sits inside
 * a larger identifier, i.e. is immediately preceded or followed
 * by a letter, digit, or underscore.
 * @param str The string in which we're looking.
 * @param pos The position at which the substring in
 *  question begins.
 * @param nameLen the length of the substring in question.
 * @param strLen The length of the string in which we're looking.
 * @return true if the substring is part of a larger word;
 *  false otherwise.
 ****/
private static boolean partOfWord (String str,
    int pos, int nameLen, int strLen)
{
    // Character immediately before the candidate match?
    if (pos > 0) {
        char prev = str.charAt(pos - 1);
        if (prev == '_' || Character.isLetterOrDigit(prev))
            return true;
    }
    // Character immediately after the candidate match?
    int end = pos + nameLen;
    if (end < strLen) {
        char next = str.charAt(end);
        if (next == '_' || Character.isLetterOrDigit(next))
            return true;
    }
    return false;
}
/* ************************************************
 * expandDoubleQuotes:
 * If the received SQL id contains a double quote, expand each
 * occurrence into TWO quotes so the id can be treated correctly
 * at parse time.
 * @param name Id that we want to print; may be null.
 * @return the id with every double quote doubled; unchanged
 *  (same reference) if null or quote-free.
 ****/
public static String expandDoubleQuotes(String name) {
    if ((name == null) || (name.indexOf('"') < 0))
        // nothing to do.
        return name;
    // Double every embedded quote.
    return name.replace("\"", "\"\"");
}
/**
 * Inverse of expandDoubleQuotes: collapse each doubled double-quote
 * ("") back into a single quote (").
 *
 * @param name id whose doubled quotes should be collapsed; may be null.
 * @return the id with each "" collapsed to "; unchanged (same
 *  reference) if null or quote-free.
 */
public static String unExpandDoubleQuotes(String name) {
    if ((name == null) || (name.indexOf("\"") < 0))
        // nothing to do.
        return name;
    char [] cA = name.toCharArray();
    char [] result = new char[cA.length];
    int j = 0;
    for (int i = 0; i < cA.length; i++) {
        if (cA[i] == '"') {
            result[j++] = cA[i];
            // BUGFIX: advance the INPUT index to skip the second quote of
            // the doubled pair. The previous code incremented the output
            // index (j++) instead, which left a '\0' gap in the result and
            // failed to consume the paired quote.
            i++;
        }
        else
            result[j++] = cA[i];
    }
    return new String(result, 0, j);
}
/* ************************************************
 * lookupSchemaId:
 * Return the (quoted) schema name corresponding to the
 * received schema id, from the map loaded in prepForDump().
 * @param schemaId The id to look up.
 * @return the schema name, or null if the id is unknown.
 ****/
public static String lookupSchemaId(String schemaId) {
return schemaMap.get(schemaId);
}
/* ************************************************
 * lookupTableId:
 * Return the qualified, quoted "schema"."table" name
 * corresponding to the received table id, from the map
 * loaded in prepForDump().
 * @param tableId The id to look up.
 * @return the table name, or null if the id is unknown.
 ****/
public static String lookupTableId(String tableId) {
return tableIdToNameMap.get(tableId);
}
/* ************************************************
 * writeVerboseOutput:
 * Print a localized message as "verbose" output on System.err,
 * so it doesn't mix with DDL when the user pipes stdout to a
 * file (unless stderr is explicitly redirected too).
 * @param key Key for the message to be printed as
 *  verbose output.
 * @param value Optional value to substitute into the message;
 *  may be null.
 ****/
public static void writeVerboseOutput(String key,
    String value) {
    String message = (value == null)
        ? lookupMessage(key)
        : lookupMessage(key, new String [] {value});
    System.err.println(message);
}
/* ************************************************
 * lookupMessage:
 * Retrieve a localized message that takes no substitution
 * values (delegates to the two-argument overload).
 * @param key The key for the localized message.
 * @return the message corresponding to the received key.
 ****/
public static String lookupMessage(String key) {
return lookupMessage(key, null);
}
/* ************************************************
 * lookupMessage:
 * Retrieve a localized message, substituting up to two values.
 * @param key The key for the localized message.
 * @param vals Array of values to be used in the message; may be
 *  null for a message with no substitutions.
 * @return the localized message with the received values
 *  substituted; "" if vals has an unsupported length
 *  (0 or more than 2 — shouldn't happen).
 ****/
public static String lookupMessage(String key, String[] vals) {
    if (vals == null)
        return langUtil.getTextMessage(key);
    if (vals.length == 1)
        return langUtil.getTextMessage(key, vals[0]);
    if (vals.length == 2)
        return langUtil.getTextMessage(key, vals[0], vals[1]);
    // Unsupported value count; mirrors the old switch default.
    return "";
}
/* ************************************************
 * removeNewlines:
 * Remove any newline characters from the received
 * string, replacing each line break (\n, \r, or \r\n,
 * as recognized by BufferedReader.readLine) with a
 * single space.
 * @param str The string from which we are removing
 *  all newline characters; may be null.
 * @return The string with all line breaks replaced by
 *  spaces; null for null input; the original string
 *  unchanged if it is empty or if reading fails.
 ****/
public static String removeNewlines(String str) {
    if (str == null)
        // don't do anything.
        return null;
    StringBuilder result = null;
    try {
        BufferedReader strVal = new BufferedReader(new StringReader(str));
        for (String txt = strVal.readLine(); txt != null;
                txt = strVal.readLine())
        {
            if (result == null)
                result = new StringBuilder(txt);
            else {
                result.append(" ");
                result.append(txt);
            }
        }
        if (result == null)
            // Empty input produces no lines at all; return it as-is.
            // (Previously this case "worked" only because result.toString()
            // threw an NPE that the catch block swallowed.)
            return str;
        return result.toString();
    } catch (Exception e) {
        // if something went wrong, just return the string as is--
        // worst case is that the generated DDL is correct, it just
        // can't be run in some SQL script apps (because of the newline
        // characters).
        return str;
    }
}
/**
 * Return true if the database's data dictionary is at version
 * {@code major.minor} or later (e.g. atVersion(conn, 10, 6)
 * checks for 10.6+).
 *
 * @param conn connection to the database being inspected.
 * @param major required major version.
 * @param minor required minor version (only compared when the
 *  major versions are equal).
 */
private static boolean atVersion( Connection conn, int major, int minor ) throws SQLException
{
PreparedStatement ps = null;
ResultSet rs = null;
try {
ps = conn.prepareStatement( "values syscs_util.syscs_get_database_property('DataDictionaryVersion')" );
rs = ps.executeQuery();
rs.next();
// Version string has the form "major.minor".
String versionString = rs.getString( 1 );
int dotIdx = versionString.indexOf( '.' );
int actualMajor = Integer.parseInt( versionString.substring( 0, dotIdx ) );
int actualMinor = Integer.parseInt( versionString.substring( dotIdx + 1, versionString.length() ) );
if ( actualMajor > major ) { return true; }
if ( actualMajor < major ) { return false; }
// Equal majors: decide on the minor version.
boolean result = ( actualMinor >= minor );
return result;
}
finally
{
// Always release the result set and statement.
if ( rs != null ) { rs.close(); }
if ( ps != null ) { ps.close(); }
}
}
}
|
apache/royale-compiler | 36,383 | compiler/src/main/java/org/apache/royale/compiler/internal/tree/mxml/MXMLClassReferenceNodeBase.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.royale.compiler.internal.tree.mxml;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.royale.compiler.constants.IASLanguageConstants;
import org.apache.royale.compiler.constants.IMXMLCoreConstants;
import org.apache.royale.compiler.constants.IMetaAttributeConstants;
import org.apache.royale.compiler.definitions.IClassDefinition;
import org.apache.royale.compiler.definitions.IDefinition;
import org.apache.royale.compiler.definitions.IEffectDefinition;
import org.apache.royale.compiler.definitions.IEventDefinition;
import org.apache.royale.compiler.definitions.ISetterDefinition;
import org.apache.royale.compiler.definitions.IStyleDefinition;
import org.apache.royale.compiler.definitions.IVariableDefinition;
import org.apache.royale.compiler.internal.definitions.ClassDefinition;
import org.apache.royale.compiler.internal.mxml.MXMLDialect;
import org.apache.royale.compiler.internal.mxml.MXMLTagData;
import org.apache.royale.compiler.internal.mxml.MXMLTextData;
import org.apache.royale.compiler.internal.parsing.SourceFragment;
import org.apache.royale.compiler.internal.parsing.mxml.MXMLToken;
import org.apache.royale.compiler.internal.projects.RoyaleProject;
import org.apache.royale.compiler.internal.scopes.ASProjectScope;
import org.apache.royale.compiler.internal.tree.as.NodeBase;
import org.apache.royale.compiler.mxml.IMXMLTagAttributeData;
import org.apache.royale.compiler.mxml.IMXMLTagData;
import org.apache.royale.compiler.mxml.IMXMLTextData;
import org.apache.royale.compiler.mxml.IMXMLUnitData;
import org.apache.royale.compiler.parsing.MXMLTokenTypes;
import org.apache.royale.compiler.problems.ICompilerProblem;
import org.apache.royale.compiler.problems.MXMLDuplicateChildTagProblem;
import org.apache.royale.compiler.problems.MXMLUnresolvedTagProblem;
import org.apache.royale.compiler.projects.ICompilerProject;
import org.apache.royale.compiler.tree.ASTNodeID;
import org.apache.royale.compiler.tree.as.IASNode;
import org.apache.royale.compiler.tree.mxml.IMXMLClassReferenceNode;
import org.apache.royale.compiler.tree.mxml.IMXMLEventSpecifierNode;
import org.apache.royale.compiler.tree.mxml.IMXMLNode;
import org.apache.royale.compiler.tree.mxml.IMXMLPropertySpecifierNode;
import org.apache.royale.compiler.tree.mxml.IMXMLSpecifierNode;
/**
* {@code MXMLClassReferenceNodeBase} is the abstract base class for AST nodes
* that represent MXML tags which map to ActionScript classes
* (either as instances of those classes
* or as definitions of subclasses of those classes).
*/
abstract class MXMLClassReferenceNodeBase extends MXMLNodeBase implements IMXMLClassReferenceNode
{
    /**
     * Constructor
     *
     * @param parent The parent node of this node, or <code>null</code> if there
     * is no parent.
     */
    MXMLClassReferenceNodeBase(NodeBase parent)
    {
        super(parent);
    }
    /**
     * The class definition to which this node refers. For example,
     * <code><s:Button></code> typically refers to the class definition
     * for <code>spark.components.Button</code>. An {@code MXMLInstanceNode}
     * creates an instance of this class, while an
     * {@code MXMLClassDefinitionNode} declares a subclass of this class.
     */
    private IClassDefinition classReference;
    /**
     * A flag that keeps track of whether the node represents a class that
     * implements mx.core.IMXML.
     */
    private boolean isMXMLObject = false;
    /**
     * A flag that keeps track of whether the node represents an MX container
     * (i.e., an mx.core.IContainer).
     */
    private boolean isContainer = false;
    /**
     * A flag that keeps track of whether the node represents a visual element
     * container (i.e., an mx.core.IVisualElementContainer).
     */
    private boolean isVisualElementContainer = false;
    /**
     * A flag that keeps track of whether the node represents a UIComponent
     * supporting deferred instantiation.
     */
    private boolean isDeferredInstantiationUIComponent = false;
    /**
     * The child nodes of this node. For {@code MXMLInstanceNode} the children
     * will all be property/event/style specifiers. For
     * {@code MXMLClassDefinitionNode} the children may include other nodes such
     * as {@code MXMLMetadataNode}, {@code MXMLScriptNode}, etc.
     * If there are no children, this will be null.
     */
    private IMXMLNode[] children;
    /**
     * A map of the child nodes of this node which specify properties. The keys
     * are the property names. If there are no properties specified, this will
     * be null.
     */
    private Map<String, IMXMLPropertySpecifierNode> propertyNodeMap;
    /**
     * All child property nodes. The propertyNodeMap only has the last value
     * specified for a property name. There can be more than one value
     * specified if there are different values for different states.
     */
    private List<IMXMLPropertySpecifierNode> allPropertyNodes;
    /**
     * A map of child nodes of this node which specify events. The keys are the
     * event names. If there are no events specified, this will be null.
     */
    private Map<String, IMXMLEventSpecifierNode> eventNodeMap;
    /**
     * All child event nodes. The eventNodeMap only has the last value
     * specified for an event name. There can be more than one value
     * specified if there are different values for different states.
     */
    private List<IMXMLEventSpecifierNode> allEventNodes;
    /**
     * A map of suffix (specifying a state or state group) to the child nodes
     * with this suffix.
     */
    private Map<String, Collection<IMXMLSpecifierNode>> suffixSpecifierMap;
    /**
     * The definition of the default property. This gets lazily initialized by
     * {@code getDefaultPropertyDefinition()} if we need to know it.
     */
    private IVariableDefinition defaultPropertyDefinition;
    /**
     * The definition of an alternate default property. This gets lazily initialized by
     * {@code getDefaultPropertyDefinition()} if we need to know it.
     */
    private IVariableDefinition altDefaultPropertyDefinition;
    /**
     * A flag that keeps track of whether the {@code defaultPropertyDefinition}
     * field has been initialized. Simply checking whether it is
     * <code>null</code> doesn't work, because <code>null</code> means
     * "no default property" rather than "default property not determined yet".
     */
    private boolean defaultPropertyDefinitionInitialized = false;
    /**
     * A flag that keeps track of whether we are processing a content unit for
     * the default property. For example, you can have MXML like
     *
     * <pre>
     * &lt;Application&gt;
     *     &lt;width&gt;100&lt;/width&gt;
     *     &lt;Button/&gt;
     *     &lt;Button/&gt;
     *     &lt;height&gt;100&lt;/height&gt;
     * &lt;/Application&gt;
     * </pre>
     *
     * where the two <code>Button</code> tags specify an implicit array for the
     * <code>mxmlContentFactory</code> property. This flag is set true on the
     * first <code>Button</code> tag and then set back to false on the
     * <code>height</code> tag.
     */
    private boolean processingDefaultProperty = false;
    /**
     * A flag that keeps track of whether we have complete the processing of the
     * content units for the default property, so that we don't process
     * non-contiguous units.
     */
    private boolean processedDefaultProperty = false;
    /**
     * The implicit node created to represent the default property.
     */
    private MXMLPropertySpecifierNode defaultPropertyNode;
    /**
     * A list that accumulates content units for the default property.
     */
    private List<IMXMLUnitData> defaultPropertyContentUnits;
    @Override
    public IASNode getChild(int i)
    {
        return children != null ? children[i] : null;
    }
    @Override
    public int getChildCount()
    {
        return children != null ? children.length : 0;
    }
    @Override
    public String getName()
    {
        // The classReference can be null when getName() is called from toString()
        // in the debugger if the node is not yet fully initialized.
        return classReference != null ? classReference.getQualifiedName() : "";
    }
    @Override
    public IClassDefinition getClassReference(ICompilerProject project)
    {
        return classReference;
    }
    @Override
    public boolean isMXMLObject()
    {
        return isMXMLObject;
    }
    @Override
    public boolean isContainer()
    {
        return isContainer;
    }
    @Override
    public boolean isVisualElementContainer()
    {
        return isVisualElementContainer;
    }
    @Override
    public boolean isDeferredInstantiationUIComponent()
    {
        return isDeferredInstantiationUIComponent;
    }
    /**
     * Sets the definition of the ActionScript class to which this node refers.
     * Also caches whether that class implements the framework interfaces that
     * affect code generation (IMXMLObject, IVisualElementContainer, IContainer,
     * IDeferredInstantiationUIComponent).
     */
    void setClassReference(RoyaleProject project, IClassDefinition classReference)
    {
        this.classReference = classReference;
        // TODO Optimize this by enumerating all interfaces one time.
        // Keep track of whether the class implements mx.core.IMXML,
        // because that affects code generation.
        String mxmlObjectInterface = project.getMXMLObjectInterface();
        isMXMLObject = classReference.isInstanceOf(mxmlObjectInterface, project);
        // Keep track of whether the class implements mx.core.IVisualElementContainer,
        // because that affects code generation.
        String visualElementContainerInterface = project.getVisualElementContainerInterface();
        isVisualElementContainer = classReference.isInstanceOf(visualElementContainerInterface, project);
        // Keep track of whether the class implements mx.core.IContainer,
        // because that affects code generation.
        String containerInterface = project.getContainerInterface();
        isContainer = classReference.isInstanceOf(containerInterface, project);
        // Keep track of whether the class implements mx.core.IDeferredInstantiationUIComponent
        // because that affects code generation.
        String deferredInstantiationUIComponentInterface = project.getDeferredInstantiationUIComponentInterface();
        isDeferredInstantiationUIComponent = classReference.isInstanceOf(deferredInstantiationUIComponentInterface, project);
    }
    /**
     * Sets the definition of the ActionScript class to which this node refers,
     * from its fully qualified name.
     *
     * @param project An {@code ICompilerProject}, used for finding the class by
     * name.
     * @param qname A fully qualified class name.
     */
    void setClassReference(RoyaleProject project, String qname)
    {
        ASProjectScope projectScope = (ASProjectScope)project.getScope();
        IDefinition definition = projectScope.findDefinitionByName(qname);
        // TODO This method is getting called by MXML tree-building
        // with an interface qname if there is a property whose type is an interface.
        // Until databinding is implemented, we need to protect against this.
        if (definition instanceof IClassDefinition)
            setClassReference(project, (IClassDefinition)definition);
    }
    /**
     * Sets the child nodes of this node.
     * As a side effect, partitions the property and event specifier children
     * into the lookup maps/lists used by the getter methods below.
     *
     * @param children An array of {@code IMXMLNode} objects.
     */
    void setChildren(IMXMLNode[] children)
    {
        this.children = children;
        if (children != null)
        {
            for (IMXMLNode child : children)
            {
                if (child instanceof IMXMLPropertySpecifierNode)
                {
                    // Lazily create the property lookup structures on first use.
                    if (propertyNodeMap == null)
                    {
                        propertyNodeMap = new HashMap<String, IMXMLPropertySpecifierNode>();
                        allPropertyNodes = new ArrayList<IMXMLPropertySpecifierNode>();
                    }
                    propertyNodeMap.put(child.getName(), (IMXMLPropertySpecifierNode)child);
                    allPropertyNodes.add((IMXMLPropertySpecifierNode)child);
                }
                else if (child instanceof IMXMLEventSpecifierNode)
                {
                    // Lazily create the event lookup structures on first use.
                    if (eventNodeMap == null)
                    {
                        eventNodeMap = new HashMap<String, IMXMLEventSpecifierNode>();
                        allEventNodes = new ArrayList<IMXMLEventSpecifierNode>();
                    }
                    eventNodeMap.put(child.getName(), (IMXMLEventSpecifierNode)child);
                    allEventNodes.add((IMXMLEventSpecifierNode)child);
                }
                if (child instanceof IMXMLSpecifierNode)
                {
                    if (suffixSpecifierMap == null)
                        suffixSpecifierMap = new HashMap<String, Collection<IMXMLSpecifierNode>>();
                    // suffixSpecifierMap.put(((IMXMLSpecifierNode)child).getSuffix(),
                    // (IMXMLSpecifierNode)child);
                }
            }
        }
    }
    @Override
    public IMXMLPropertySpecifierNode getPropertySpecifierNode(String name)
    {
        return propertyNodeMap != null ? propertyNodeMap.get(name) : null;
    }
    @Override
    public IMXMLPropertySpecifierNode[] getPropertySpecifierNodes()
    {
        return allPropertyNodes != null ?
                allPropertyNodes.toArray(new IMXMLPropertySpecifierNode[0]) :
                null;
    }
    @Override
    public IMXMLEventSpecifierNode getEventSpecifierNode(String name)
    {
        return eventNodeMap != null ? eventNodeMap.get(name) : null;
    }
    @Override
    public IMXMLEventSpecifierNode[] getEventSpecifierNodes()
    {
        return allEventNodes != null ?
                allEventNodes.toArray(new IMXMLEventSpecifierNode[0]) :
                null;
    }
    @Override
    public IMXMLSpecifierNode[] getSpecifierNodesWithSuffix(String suffix)
    {
        // NOTE(review): suffixSpecifierMap entries are never populated (the put()
        // in setChildren() is commented out), and get(suffix) may return null,
        // which would NPE on the toArray() call — confirm intended behavior.
        return suffixSpecifierMap != null ?
                suffixSpecifierMap.get(suffix).toArray(new IMXMLSpecifierNode[0]) :
                null;
    }
    @Override
    protected MXMLNodeInfo createNodeInfo(MXMLTreeBuilder builder)
    {
        return new MXMLNodeInfo(builder);
    }
    @Override
    protected void processTagSpecificAttribute(MXMLTreeBuilder builder, IMXMLTagData tag,
                                              IMXMLTagAttributeData attribute,
                                              MXMLNodeInfo info)
    {
        // Attributes that resolve to a property/style/event of the referenced
        // class become specifier child nodes; anything else is delegated to
        // the superclass.
        MXMLSpecifierNodeBase childNode = createSpecifierNode(builder, attribute.getName());
        if (childNode != null)
        {
            childNode.setLocation(attribute);
            childNode.setSuffix(builder, attribute.getStateName());
            childNode.initializeFromAttribute(builder, attribute, info);
            info.addChildNode(childNode);
        }
        else
        {
            super.processTagSpecificAttribute(builder, tag, attribute, info);
        }
    }
    @Override
    protected void processChildTag(MXMLTreeBuilder builder, IMXMLTagData tag,
                                   IMXMLTagData childTag,
                                   MXMLNodeInfo info)
    {
        // Reject a child tag that duplicates a specifier already seen with the
        // same name and state suffix.
        if (info.hasSpecifierWithName(childTag.getShortName(), childTag.getStateName()))
        {
            ICompilerProblem problem = new MXMLDuplicateChildTagProblem(childTag);
            builder.addProblem(problem);
            return ;
        }
        RoyaleProject project = builder.getProject();
        // Handle child tags that are property/style/event specifiers.
        MXMLSpecifierNodeBase childNode = null;
        // ...but only if the child has the same prefix as the parent -JT
        // apache/royale-compiler#101
        if(tag.getPrefix().equals(childTag.getPrefix()))
        {
            childNode = createSpecifierNode(builder, childTag.getShortName());
        }
        if (childNode != null)
        {
            // This tag is not part of the default property value.
            processNonDefaultPropertyContentUnit(builder, info, tag);
            childNode.setSuffix(builder, childTag.getStateName());
            childNode.initializeFromTag(builder, childTag);
            info.addChildNode(childNode);
        }
        else if (builder.getFileScope().isScriptTag(childTag) &&
                 (builder.getMXMLDialect().isEqualToOrBefore(MXMLDialect.MXML_2009)))
        {
            // In MXML 2006 and 2009, allow a <Script> tag
            // inside any class reference tag
            if (!processingDefaultProperty)
            {
                // Not processing the default property, just make a script
                // node and put it in the tree.
                MXMLScriptNode scriptNode = new MXMLScriptNode(this);
                scriptNode.initializeFromTag(builder, childTag);
                info.addChildNode(scriptNode);
            }
            else
            {
                // We are processing a default property. Script nodes need
                // to be a child of that default specifier nodes so that
                // finding a node by offset works properly.
                // See: http://bugs.adobe.com/jira/browse/CMP-955
                processDefaultPropertyContentUnit(builder, childTag, info);
            }
        }
        else if (builder.getFileScope().isReparentTag(childTag))
        {
            MXMLReparentNode reparentNode = new MXMLReparentNode(this);
            reparentNode.initializeFromTag(builder, childTag);
            info.addChildNode(reparentNode);
        }
        else
        {
            IDefinition definition = builder.getFileScope().resolveTagToDefinition(childTag);
            if (definition instanceof ClassDefinition)
            {
                // Handle child tags that are instance tags.
                IVariableDefinition defaultPropertyDefinition = getDefaultPropertyDefinition(builder);
                if (defaultPropertyDefinition != null)
                {
                    if (processedDefaultProperty)
                    {
                        // The default-property run was already closed; a second,
                        // non-contiguous run is reported as a duplicate child tag.
                        MXMLDuplicateChildTagProblem problem = new MXMLDuplicateChildTagProblem(childTag);
                        problem.childTag = defaultPropertyDefinition.getBaseName();
                        problem.element = tag.getShortName();
                        builder.addProblem(problem);
                        return ;
                    }
                    else
                    {
                        // Since there is a default property and we haven't already processed it,
                        // assume this child instance tag is part of its value.
                        processDefaultPropertyContentUnit(builder, childTag, info);
                    }
                }
                else
                {
                    // This tag is not part of the default property value.
                    processNonDefaultPropertyContentUnit(builder, info, tag);
                    MXMLInstanceNode instanceNode = MXMLInstanceNode.createInstanceNode(
                            builder, definition.getQualifiedName(), this);
                    instanceNode.setClassReference(project, (IClassDefinition)definition); // TODO Move this logic to initializeFromTag().
                    instanceNode.initializeFromTag(builder, childTag);
                    info.addChildNode(instanceNode);
                }
            }
            else
            {
                // The tag did not resolve to a class. Special-case handling for
                // HTML-flavored content feeding a "text"/"innerHTML" default property.
                IVariableDefinition defaultPropertyDefinition = getDefaultPropertyDefinition(builder);
                if (defaultPropertyDefinition != null && !processedDefaultProperty && defaultPropertyDefinition.getBaseName().equals("text"))
                {
                    // An XHTML-namespace child switches the effective default
                    // property from "text" to "html", when the class has one.
                    String uri = childTag.getURI();
                    if (uri != null && uri.equals("http://www.w3.org/1999/xhtml"))
                    {
                        IVariableDefinition htmlDef = (IVariableDefinition)project.resolveSpecifier(classReference, "html");
                        if (htmlDef != null)
                        {
                            defaultPropertyDefinition = this.defaultPropertyDefinition = htmlDef;
                            processDefaultPropertyContentUnit(builder, childTag, info);
                            // seems strange we have to finish default property processing
                            // by calling nonDefaultProperty code
                            processNonDefaultPropertyContentUnit(builder, info, tag);
                            return;
                        }
                    }
                }
                else if (altDefaultPropertyDefinition != null && !processedDefaultProperty && altDefaultPropertyDefinition.getBaseName().equals("innerHTML"))
                {
                    String uri = childTag.getURI();
                    if (uri != null && uri.equals("library://ns.apache.org/royale/html"))
                    {
                        IVariableDefinition textDef = (IVariableDefinition)project.resolveSpecifier(classReference, "innerHTML");
                        if (textDef != null)
                        {
                            List<IMXMLNode> nodes = info.getChildNodeList();
                            if (nodes.size() > 0)
                            {
                                // An innerHTML property node already exists:
                                // append this tag's markup to it as a source fragment.
                                IMXMLNode lastNode = nodes.get(nodes.size() - 1);
                                if (lastNode.getNodeID() == ASTNodeID.MXMLPropertySpecifierID)
                                {
                                    MXMLPropertySpecifierNode propNode = (MXMLPropertySpecifierNode)lastNode;
                                    String name = propNode.getName();
                                    if (name.equals("innerHTML"))
                                    {
                                        /*
                                        MXMLStringNode stringNode = (MXMLStringNode)propNode.getChild(0);
                                        MXMLLiteralNode valueNode = (MXMLLiteralNode)stringNode.getChild(0);
                                        String tagAsString = ((MXMLTagData)childTag).stringify();
                                        String currentString = (String)valueNode.getValue();
                                        MXMLLiteralNode newValueNode = new MXMLLiteralNode(stringNode,
                                                currentString + tagAsString);
                                        IMXMLNode[] newChildren = new IMXMLNode[1];
                                        newChildren[0] = newValueNode;
                                        stringNode.setChildren(newChildren);
                                        stringNode.setExpressionNode(newValueNode);
                                        */
                                        SourceFragment[] sourceFragments = new SourceFragment[1];
                                        String tagAsString = ((MXMLTagData)childTag).stringify();
                                        SourceFragment sourceFragment = new SourceFragment(tagAsString, tagAsString, childTag.getLocationOfChildUnits());
                                        sourceFragments[0] = sourceFragment;
                                        info.addSourceFragments(childTag.getSourcePath(), sourceFragments);
                                    }
                                }
                            }
                            else
                            {
                                // First innerHTML content: wrap the raw tag markup
                                // in CDATA and initialize a new specifier node from it.
                                childNode = createSpecifierNode(builder, "innerHTML");
                                if (childNode != null)
                                {
                                    childNode.setSuffix(builder, childTag.getStateName());
                                    String tagAsString = ((MXMLTagData)childTag).stringify();
                                    String tagAsCData = IMXMLCoreConstants.cDataStart + tagAsString + IMXMLCoreConstants.cDataEnd;
                                    MXMLToken token = new MXMLToken(MXMLTokenTypes.TOKEN_CDATA,
                                            childTag.getStart(), childTag.getEnd(),
                                            childTag.getLine(), childTag.getColumn(),
                                            tagAsCData);
                                    MXMLTextData text = new MXMLTextData(token);
                                    text.setSourceLocation(childTag.getLocationOfChildUnits());
                                    childNode.initializeFromText(builder, text, info);
                                    info.addChildNode(childNode);
                                }
                            }
                            return;
                        }
                    }
                }
                if (processingDefaultProperty && definition == null)
                {
                    builder.getProblems().add(new MXMLUnresolvedTagProblem(childTag));
                    return;
                }
                // Handle child tags that are something other than property/style/event tags
                // or instance tags.
                // This tag is not part of the default property value.
                processNonDefaultPropertyContentUnit(builder, info, tag);
                super.processChildTag(builder, tag, childTag, info);
            }
        }
    }
    /**
     * Determines, and caches, the default property for the class to which this
     * node refers.
     */
    private IVariableDefinition getDefaultPropertyDefinition(MXMLTreeBuilder builder)
    {
        if (!defaultPropertyDefinitionInitialized)
        {
            RoyaleProject project = builder.getProject();
            String defaultPropertyName = classReference.getDefaultPropertyName(project);
            if (defaultPropertyName != null)
            {
                // A name of the form "primary|alternate" declares both a primary
                // and an alternate default property.
                if (defaultPropertyName.contains("|"))
                {
                    int c = defaultPropertyName.indexOf("|");
                    String alt = defaultPropertyName.substring(c + 1);
                    defaultPropertyName = defaultPropertyName.substring(0, c);
                    altDefaultPropertyDefinition = (IVariableDefinition)project.resolveSpecifier(classReference, alt);
                }
                defaultPropertyDefinition =
                        (IVariableDefinition)project.resolveSpecifier(classReference, defaultPropertyName);
            }
            defaultPropertyDefinitionInitialized = true;
        }
        return defaultPropertyDefinition;
    }
    /**
     * Called on each content unit that is part of the default value.
     */
    private void processDefaultPropertyContentUnit(MXMLTreeBuilder builder,
                                                   IMXMLTagData childTag,
                                                   MXMLNodeInfo info)
    {
        // If this gets called and we're not already processing the default property,
        // then childTag is the first child tag of the default property value.
        if (!processingDefaultProperty)
        {
            processingDefaultProperty = true;
            String defaultPropertyName = getDefaultPropertyDefinition(builder).getBaseName();
            // Create an implicit MXMLPropertySpecifierNode for the default property,
            // at the correct location in the child list.
            defaultPropertyNode =
                    (MXMLPropertySpecifierNode)createSpecifierNode(builder, defaultPropertyName);
            info.addChildNode(defaultPropertyNode);
            // Create a list in which we'll accumulate the tags for the default property.
            defaultPropertyContentUnits = new ArrayList<IMXMLUnitData>(1);
        }
        defaultPropertyContentUnits.add((IMXMLUnitData)childTag);
    }
    /**
     * Called on each content unit that is not part of the default value.
     * If a default-property run was being accumulated, this closes it:
     * the accumulated units (minus trailing script tags) initialize the
     * implicit default property node.
     */
    private void processNonDefaultPropertyContentUnit(MXMLTreeBuilder builder, MXMLNodeInfo info, IMXMLTagData parentTag)
    {
        // If this gets called and we're processing the default property,
        // then childTag is the first child tag after the default property value tags.
        if (processingDefaultProperty)
        {
            processingDefaultProperty = false;
            processedDefaultProperty = true;
            assert defaultPropertyContentUnits.size() > 0;
            assert !builder.getFileScope().isScriptTag(defaultPropertyContentUnits.get(0)) : "First default property content unit must not be a script tag!";
            // We've accumulated all the default property child tags
            // in defaultPropertyChildTags. Use them to initialize
            // the defaultPropertyNode.
            // But first find all the trailing script tags
            // and remove those from the list of default
            // property content units.
            // Script tags are put in the defaultPropertyContentUnits collection
            // to fix http://bugs.adobe.com/jira/browse/CMP-955.
            int lastNonScriptTagIndex;
            for (lastNonScriptTagIndex = (defaultPropertyContentUnits.size() - 1); lastNonScriptTagIndex > 0; --lastNonScriptTagIndex)
            {
                IMXMLUnitData unitData = defaultPropertyContentUnits.get(lastNonScriptTagIndex);
                if (!builder.getFileScope().isScriptTag(unitData))
                    break;
            }
            assert lastNonScriptTagIndex >= 0;
            assert lastNonScriptTagIndex < defaultPropertyContentUnits.size();
            List<IMXMLUnitData> trailingScriptTags = defaultPropertyContentUnits.subList(lastNonScriptTagIndex + 1, defaultPropertyContentUnits.size());
            List<IMXMLUnitData> defaultPropertyContentUnitsWithoutTrailingScriptTags =
                    defaultPropertyContentUnits.subList(0, lastNonScriptTagIndex + 1);
            // process the default property content units with the trailing
            // script tags removed.
            IVariableDefinition defaultPropertyDefinition =
                    getDefaultPropertyDefinition(builder);
            defaultPropertyNode.initializeDefaultProperty(
                    builder, defaultPropertyDefinition, parentTag, defaultPropertyContentUnitsWithoutTrailingScriptTags);
            // Now create MXMLScriptNode's for all the trailing script tags.
            for (IMXMLUnitData scriptTagData : trailingScriptTags)
            {
                assert builder.getFileScope().isScriptTag(scriptTagData);
                MXMLScriptNode scriptNode = new MXMLScriptNode(this);
                scriptNode.initializeFromTag(builder, (IMXMLTagData)scriptTagData);
                info.addChildNode(scriptNode);
            }
        }
    }
    @Override
    protected void processChildNonWhitespaceUnit(MXMLTreeBuilder builder, IMXMLTagData tag,
                                                 IMXMLTextData text,
                                                 MXMLNodeInfo info)
    {
        // Non-whitespace may be the value of a default property.
        IVariableDefinition defaultPropertyDefinition = getDefaultPropertyDefinition(builder);
        // If the default property is a setter, look at its corresponding getter
        // for the [RichTextContent] metadata as well.
        IVariableDefinition getterDefinition = (defaultPropertyDefinition instanceof ISetterDefinition) ?
                ((ISetterDefinition)defaultPropertyDefinition).resolveCorrespondingAccessor(builder.getProject()) :null;
        if (defaultPropertyDefinition != null &&
            (defaultPropertyDefinition.getTypeAsDisplayString().equals(IASLanguageConstants.String) ||
             (defaultPropertyDefinition.getMetaTagByName(IMetaAttributeConstants.ATTRIBUTE_RICHTEXTCONTENT) != null) ||
              (getterDefinition != null &&
               (getterDefinition.getMetaTagByName(IMetaAttributeConstants.ATTRIBUTE_RICHTEXTCONTENT) != null))))
        {
            MXMLSpecifierNodeBase childNode =
                    createSpecifierNode(builder, defaultPropertyDefinition.getBaseName());
            if (childNode != null)
            {
                childNode.initializeFromText(builder, text, info);
                info.addChildNode(childNode);
            }
        }
        else if (altDefaultPropertyDefinition != null && altDefaultPropertyDefinition.getTypeAsDisplayString().equals(IASLanguageConstants.String))
        {
            // Fall back to the alternate default property when it is String-typed.
            MXMLSpecifierNodeBase childNode =
                    createSpecifierNode(builder, altDefaultPropertyDefinition.getBaseName());
            if (childNode != null)
            {
                childNode.initializeFromText(builder, text, info);
                info.addChildNode(childNode);
            }
        }
        else
        {
            super.processChildNonWhitespaceUnit(builder, tag, text, info);
        }
    }
    /**
     * Resolve the specifier name in the class definition to a member
     * definition, and create a specifier node based on the member type.
     *
     * @param builder MXML tree builder.
     * @param specifierName Specifier name.
     * @return A MXML specifier node, or <code>null</code> if the name does not
     * resolve to a property/event/style/effect and the class is not dynamic.
     */
    protected MXMLSpecifierNodeBase createSpecifierNode(MXMLTreeBuilder builder, String specifierName)
    {
        MXMLSpecifierNodeBase specifierNode = null;
        // Check if the attribute is a declared property, style, or event.
        RoyaleProject project = builder.getProject();
        IDefinition specifierDefinition = project.resolveSpecifier(classReference, specifierName);
        if (specifierDefinition instanceof ISetterDefinition ||
            specifierDefinition instanceof IVariableDefinition)
        {
            specifierNode = new MXMLPropertySpecifierNode(this);
        }
        else if (specifierDefinition instanceof IEventDefinition)
        {
            specifierNode = new MXMLEventSpecifierNode(this);
        }
        else if (specifierDefinition instanceof IStyleDefinition)
        {
            specifierNode = new MXMLStyleSpecifierNode(this);
        }
        else if (specifierDefinition instanceof IEffectDefinition)
        {
            specifierNode = new MXMLEffectSpecifierNode(this);
        }
        if (specifierNode != null)
        {
            specifierNode.setDefinition(specifierDefinition); // TODO Move this logic
        }
        // If not, dynamic classes allow new properties to be set via attributes.
        else if (classReference.isDynamic())
        {
            specifierNode = new MXMLPropertySpecifierNode(this);
            ((MXMLPropertySpecifierNode)specifierNode).setDynamicName(specifierName); // TODO Move this logic
        }
        return specifierNode;
    }
    @Override
    protected void initializationComplete(MXMLTreeBuilder builder, IMXMLTagData tag,
                                          MXMLNodeInfo info)
    {
        super.initializationComplete(builder, tag, info);
        // If the last child unit was part of the default property,
        // we don't know to process the default property units
        // until we get here.
        processNonDefaultPropertyContentUnit(builder, info, tag);
        setChildren(info.getChildNodeList().toArray(new IMXMLNode[0]));
        // If the class referenced by this node implements mx.core.IContainer,
        // add an expression dependency on mx.core.UIComponentDescriptor
        // because we'll have to codegen descriptors.
        if (isContainer)
        {
            RoyaleProject project = builder.getProject();
            builder.addExpressionDependency(project.getUIComponentDescriptorClass());
        }
    }
    /**
     * For debugging only. Builds a string such as
     * <code>"spark.components.Application"</code> from the qualified name of
     * the class referenced by the node.
     */
    @Override
    protected boolean buildInnerString(StringBuilder sb)
    {
        sb.append('"');
        sb.append(getName());
        sb.append('"');
        return true;
    }
}
|
google/guava | 36,242 | guava-tests/test/com/google/common/collect/ListsTest.java | /*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.Iterables.elementsEqual;
import static com.google.common.collect.Lists.cartesianProduct;
import static com.google.common.collect.Lists.charactersOf;
import static com.google.common.collect.Lists.computeArrayListCapacity;
import static com.google.common.collect.Lists.newArrayListWithExpectedSize;
import static com.google.common.collect.Lists.partition;
import static com.google.common.collect.Lists.transform;
import static com.google.common.collect.ReflectionFreeAssertThrows.assertThrows;
import static com.google.common.collect.testing.IteratorFeature.UNMODIFIABLE;
import static com.google.common.truth.Truth.assertThat;
import static java.lang.System.arraycopy;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import static java.util.Collections.nCopies;
import static java.util.Collections.singletonList;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.annotations.J2ktIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.collect.testing.ListTestSuiteBuilder;
import com.google.common.collect.testing.TestStringListGenerator;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.ListFeature;
import com.google.common.collect.testing.google.ListGenerators.CharactersOfCharSequenceGenerator;
import com.google.common.collect.testing.google.ListGenerators.CharactersOfStringGenerator;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.SerializableTester;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
import java.util.RandomAccess;
import java.util.concurrent.CopyOnWriteArrayList;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.jspecify.annotations.NullMarked;
/**
* Unit test for {@code Lists}.
*
* @author Kevin Bourrillion
* @author Mike Bostock
* @author Jared Levy
*/
@GwtCompatible
@NullMarked
public class ListsTest extends TestCase {
  // Shared fixture: a small collection with a duplicate element (0, 1, 1).
  private static final Collection<Integer> SOME_COLLECTION = asList(0, 1, 1);
  // Iterable view over SOME_COLLECTION, via the serializable SomeIterable wrapper.
  private static final Iterable<Integer> SOME_ITERABLE = new SomeIterable();
private static final class RemoveFirstFunction implements Function<String, String>, Serializable {
@Override
public String apply(String from) {
return (from.length() == 0) ? from : from.substring(1);
}
}
private static class SomeIterable implements Iterable<Integer>, Serializable {
@Override
public Iterator<Integer> iterator() {
return SOME_COLLECTION.iterator();
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
  // Mutable random-access fixture list.
  private static final List<Integer> SOME_LIST = Lists.newArrayList(1, 2, 3, 4);
  // Same elements, but sequential-access (LinkedList) to exercise both list shapes.
  private static final List<Integer> SOME_SEQUENTIAL_LIST = new LinkedList<>(asList(1, 2, 3, 4));
  // String counterpart of SOME_LIST, used with SOME_FUNCTION below.
  private static final List<String> SOME_STRING_LIST = asList("1", "2", "3", "4");
  private static final Function<Number, String> SOME_FUNCTION = new SomeFunction();
private static class SomeFunction implements Function<Number, String>, Serializable {
@Override
public String apply(Number n) {
return String.valueOf(n);
}
@GwtIncompatible @J2ktIncompatible private static final long serialVersionUID = 0;
}
  @J2ktIncompatible
  @GwtIncompatible // suite
  @AndroidIncompatible // test-suite builders
  public static Test suite() {
    // Aggregates this class's unit tests with conformance suites (built by testlib's
    // ListTestSuiteBuilder) that exercise Lists.asList, Lists.transform, Lists.reverse and
    // Lists.charactersOf against the standard List contract.
    TestSuite suite = new TestSuite();
    suite.addTestSuite(ListsTest.class);
    // Lists.asList with one explicit leading element plus a "rest" array.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    String[] rest = new String[elements.length - 1];
                    arraycopy(elements, 1, rest, 0, elements.length - 1);
                    return Lists.asList(elements[0], rest);
                  }
                })
            .named("Lists.asList, 2 parameter")
            .withFeatures(
                CollectionSize.SEVERAL,
                CollectionSize.ONE,
                CollectionFeature.SERIALIZABLE,
                CollectionFeature.ALLOWS_NULL_VALUES)
            .createTestSuite());
    // Lists.asList with two explicit leading elements plus a "rest" array.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    String[] rest = new String[elements.length - 2];
                    arraycopy(elements, 2, rest, 0, elements.length - 2);
                    return Lists.asList(elements[0], elements[1], rest);
                  }
                })
            .named("Lists.asList, 3 parameter")
            .withFeatures(
                CollectionSize.SEVERAL,
                CollectionFeature.SERIALIZABLE,
                CollectionFeature.ALLOWS_NULL_VALUES)
            .createTestSuite());
    Function<String, String> removeFirst = new RemoveFirstFunction();
    // Lists.transform over a random-access backing list; elements are prefixed with "q" and the
    // prefix stripped by removeFirst, so no nulls ever reach the function.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    List<String> fromList = new ArrayList<>();
                    for (String element : elements) {
                      fromList.add("q" + checkNotNull(element));
                    }
                    return transform(fromList, removeFirst);
                  }
                })
            .named("Lists.transform, random access, no nulls")
            .withFeatures(
                CollectionSize.ANY,
                ListFeature.REMOVE_OPERATIONS,
                CollectionFeature.SERIALIZABLE,
                CollectionFeature.ALLOWS_NULL_QUERIES)
            .createTestSuite());
    // Same as above, but over a sequential (LinkedList) backing list.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    List<String> fromList = new LinkedList<>();
                    for (String element : elements) {
                      fromList.add("q" + checkNotNull(element));
                    }
                    return transform(fromList, removeFirst);
                  }
                })
            .named("Lists.transform, sequential access, no nulls")
            .withFeatures(
                CollectionSize.ANY,
                ListFeature.REMOVE_OPERATIONS,
                CollectionFeature.SERIALIZABLE,
                CollectionFeature.ALLOWS_NULL_QUERIES)
            .createTestSuite());
    // Identity transform over a random-access list, allowing null elements through.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    List<String> fromList = Lists.newArrayList(elements);
                    return transform(fromList, Functions.<String>identity());
                  }
                })
            .named("Lists.transform, random access, nulls")
            .withFeatures(
                CollectionSize.ANY,
                ListFeature.REMOVE_OPERATIONS,
                CollectionFeature.SERIALIZABLE,
                CollectionFeature.ALLOWS_NULL_VALUES)
            .createTestSuite());
    // Identity transform over a sequential list, allowing null elements through.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    List<String> fromList = new LinkedList<>(asList(elements));
                    return transform(fromList, Functions.<String>identity());
                  }
                })
            .named("Lists.transform, sequential access, nulls")
            .withFeatures(
                CollectionSize.ANY,
                ListFeature.REMOVE_OPERATIONS,
                CollectionFeature.SERIALIZABLE,
                CollectionFeature.ALLOWS_NULL_VALUES)
            .createTestSuite());
    // Lists.reverse views: each generator inserts the elements backwards so that the reversed
    // view yields them in the order the builder expects.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    List<String> list = new ArrayList<>();
                    for (int i = elements.length - 1; i >= 0; i--) {
                      list.add(elements[i]);
                    }
                    return Lists.reverse(list);
                  }
                })
            .named("Lists.reverse[ArrayList]")
            .withFeatures(
                CollectionSize.ANY,
                CollectionFeature.ALLOWS_NULL_VALUES,
                ListFeature.GENERAL_PURPOSE)
            .createTestSuite());
    // Arrays.asList backing list is fixed-size, so only SUPPORTS_SET is claimed here.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    String[] reverseElements = new String[elements.length];
                    for (int i = elements.length - 1, j = 0; i >= 0; i--, j++) {
                      reverseElements[j] = elements[i];
                    }
                    return Lists.reverse(asList(reverseElements));
                  }
                })
            .named("Lists.reverse[Arrays.asList]")
            .withFeatures(
                CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_VALUES, ListFeature.SUPPORTS_SET)
            .createTestSuite());
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    List<String> list = new LinkedList<>();
                    for (int i = elements.length - 1; i >= 0; i--) {
                      list.add(elements[i]);
                    }
                    return Lists.reverse(list);
                  }
                })
            .named("Lists.reverse[LinkedList]")
            .withFeatures(
                CollectionSize.ANY,
                CollectionFeature.ALLOWS_NULL_VALUES,
                ListFeature.GENERAL_PURPOSE)
            .createTestSuite());
    // ImmutableList backing list rejects nulls and mutation; only null *queries* are allowed.
    suite.addTest(
        ListTestSuiteBuilder.using(
                new TestStringListGenerator() {
                  @Override
                  protected List<String> create(String[] elements) {
                    ImmutableList.Builder<String> builder = ImmutableList.builder();
                    for (int i = elements.length - 1; i >= 0; i--) {
                      builder.add(elements[i]);
                    }
                    return Lists.reverse(builder.build());
                  }
                })
            .named("Lists.reverse[ImmutableList]")
            .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES)
            .createTestSuite());
    // Lists.charactersOf over both overloads (String and general CharSequence).
    suite.addTest(
        ListTestSuiteBuilder.using(new CharactersOfStringGenerator())
            .named("Lists.charactersOf[String]")
            .withFeatures(
                CollectionSize.ANY,
                CollectionFeature.SERIALIZABLE,
                CollectionFeature.ALLOWS_NULL_QUERIES)
            .createTestSuite());
    suite.addTest(
        ListTestSuiteBuilder.using(new CharactersOfCharSequenceGenerator())
            .named("Lists.charactersOf[CharSequence]")
            .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES)
            .createTestSuite());
    return suite;
  }
  public void testCharactersOfIsView() {
    // charactersOf returns a live view: later mutations of the CharSequence are visible.
    StringBuilder builder = new StringBuilder("abc");
    List<Character> chars = charactersOf(builder);
    assertEquals(asList('a', 'b', 'c'), chars);
    builder.append("def");
    assertEquals(asList('a', 'b', 'c', 'd', 'e', 'f'), chars);
    builder.deleteCharAt(5);
    assertEquals(asList('a', 'b', 'c', 'd', 'e'), chars);
  }
  public void testNewArrayListEmpty() {
    // The no-arg factory must produce an empty, mutable ArrayList.
    @SuppressWarnings("UseCollectionConstructor") // test of factory method
    ArrayList<Integer> list = Lists.newArrayList();
    assertEquals(emptyList(), list);
  }
  public void testNewArrayListWithCapacity() {
    // Capacity is only a sizing hint; the resulting lists are empty regardless of its value.
    ArrayList<Integer> list = Lists.newArrayListWithCapacity(0);
    assertEquals(emptyList(), list);
    ArrayList<Integer> bigger = Lists.newArrayListWithCapacity(256);
    assertEquals(emptyList(), bigger);
  }
  public void testNewArrayListWithCapacity_negative() {
    // A negative capacity is a caller error and must be rejected eagerly.
    assertThrows(IllegalArgumentException.class, () -> Lists.newArrayListWithCapacity(-1));
  }
  public void testNewArrayListWithExpectedSize() {
    // Expected size is only a sizing hint; the resulting lists are empty regardless of its value.
    ArrayList<Integer> list = newArrayListWithExpectedSize(0);
    assertEquals(emptyList(), list);
    ArrayList<Integer> bigger = newArrayListWithExpectedSize(256);
    assertEquals(emptyList(), bigger);
  }
  public void testNewArrayListWithExpectedSize_negative() {
    // A negative expected size is a caller error and must be rejected eagerly.
    assertThrows(IllegalArgumentException.class, () -> newArrayListWithExpectedSize(-1));
  }
  public void testNewArrayListVarArgs() {
    // Varargs overload preserves order and duplicates (SOME_COLLECTION holds 0, 1, 1).
    ArrayList<Integer> list = Lists.newArrayList(0, 1, 1);
    assertEquals(SOME_COLLECTION, list);
  }
  public void testComputeArrayListCapacity() {
    // Pins the growth heuristic implied by these expectations: 5 + n + n/10,
    // saturating at Integer.MAX_VALUE near the top of the int range.
    assertEquals(5, computeArrayListCapacity(0));
    assertEquals(13, computeArrayListCapacity(8));
    assertEquals(89, computeArrayListCapacity(77));
    assertEquals(22000005, computeArrayListCapacity(20000000));
    assertEquals(Integer.MAX_VALUE, computeArrayListCapacity(Integer.MAX_VALUE - 1000));
  }
  public void testNewArrayListFromCollection() {
    // Copying a Collection must preserve its contents and order.
    @SuppressWarnings("UseCollectionConstructor") // test of factory method
    ArrayList<Integer> list = Lists.newArrayList(SOME_COLLECTION);
    assertEquals(SOME_COLLECTION, list);
  }
  public void testNewArrayListFromIterable() {
    // The Iterable overload must work even for a plain (non-Collection) Iterable.
    ArrayList<Integer> list = Lists.newArrayList(SOME_ITERABLE);
    assertEquals(SOME_COLLECTION, list);
  }
  public void testNewArrayListFromIterator() {
    // The Iterator overload must drain the iterator into a new list.
    ArrayList<Integer> list = Lists.newArrayList(SOME_COLLECTION.iterator());
    assertEquals(SOME_COLLECTION, list);
  }
  public void testNewLinkedListEmpty() {
    // The no-arg factory must produce an empty, mutable LinkedList.
    @SuppressWarnings("UseCollectionConstructor") // test of factory method
    LinkedList<Integer> list = Lists.newLinkedList();
    assertEquals(emptyList(), list);
  }
  public void testNewLinkedListFromCollection() {
    // Copying a Collection into a LinkedList must preserve contents and order.
    @SuppressWarnings("UseCollectionConstructor") // test of factory method
    LinkedList<Integer> list = Lists.newLinkedList(SOME_COLLECTION);
    assertEquals(SOME_COLLECTION, list);
  }
  public void testNewLinkedListFromIterable() {
    // The Iterable overload must work even for a plain (non-Collection) Iterable.
    LinkedList<Integer> list = Lists.newLinkedList(SOME_ITERABLE);
    assertEquals(SOME_COLLECTION, list);
  }
  @J2ktIncompatible
  @GwtIncompatible // CopyOnWriteArrayList
  public void testNewCOWALEmpty() {
    // The no-arg factory must produce an empty CopyOnWriteArrayList.
    CopyOnWriteArrayList<Integer> list = Lists.newCopyOnWriteArrayList();
    assertEquals(emptyList(), list);
  }
  @J2ktIncompatible
  @GwtIncompatible // CopyOnWriteArrayList
  public void testNewCOWALFromIterable() {
    // Copying an Iterable into a CopyOnWriteArrayList must preserve contents and order.
    CopyOnWriteArrayList<Integer> list = Lists.newCopyOnWriteArrayList(SOME_ITERABLE);
    assertEquals(SOME_COLLECTION, list);
  }
  @J2ktIncompatible
  @GwtIncompatible // NullPointerTester
  public void testNullPointerExceptions() {
    // Every public static method of Lists must reject null for non-@Nullable parameters.
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicStaticMethods(Lists.class);
  }
  /**
   * This is just here to illustrate how {@code Arrays#asList} differs from {@code
   * Lists#newArrayList}: the former is a fixed-size view that supports {@code set} but rejects
   * structural modification.
   */
  public void testArraysAsList() {
    List<String> ourWay = Lists.newArrayList("foo", "bar", "baz");
    List<String> otherWay = asList("foo", "bar", "baz");
    // They're logically equal
    assertEquals(ourWay, otherWay);
    // The result of Arrays.asList() is mutable
    otherWay.set(0, "FOO");
    assertEquals("FOO", otherWay.get(0));
    // But it can't grow
    assertThrows(UnsupportedOperationException.class, () -> otherWay.add("nope"));
    // And it can't shrink
    assertThrows(UnsupportedOperationException.class, () -> otherWay.remove(2));
  }
  @J2ktIncompatible
  @GwtIncompatible // SerializableTester
  public void testAsList1() {
    // Lists.asList(first, rest[]) must be random-access, serializable, and expose a
    // well-behaved unmodifiable iterator over [first, rest...].
    List<String> list = Lists.asList("foo", new String[] {"bar", "baz"});
    checkFooBarBazList(list);
    SerializableTester.reserializeAndAssert(list);
    assertTrue(list instanceof RandomAccess);
    new IteratorTester<String>(
        5, UNMODIFIABLE, asList("foo", "bar", "baz"), IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override
      protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", new String[] {"bar", "baz"}).iterator();
      }
    }.test();
  }
  /** Asserts that {@code list} is exactly ["foo", "bar", "baz"] with strict index bounds. */
  private void checkFooBarBazList(List<String> list) {
    assertThat(list).containsExactly("foo", "bar", "baz").inOrder();
    assertEquals(3, list.size());
    assertIndexIsOutOfBounds(list, -1);
    assertEquals("foo", list.get(0));
    assertEquals("bar", list.get(1));
    assertEquals("baz", list.get(2));
    assertIndexIsOutOfBounds(list, 3);
  }
  public void testAsList1Small() {
    // Degenerate case: an empty rest array yields a single-element random-access list.
    List<String> list = Lists.asList("foo", new String[0]);
    assertThat(list).contains("foo");
    assertEquals(1, list.size());
    assertIndexIsOutOfBounds(list, -1);
    assertEquals("foo", list.get(0));
    assertIndexIsOutOfBounds(list, 1);
    assertTrue(list instanceof RandomAccess);
    new IteratorTester<String>(
        3, UNMODIFIABLE, singletonList("foo"), IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override
      protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", new String[0]).iterator();
      }
    }.test();
  }
  public void testAsList2() {
    // Three-argument overload: two explicit leading elements plus a rest array.
    List<String> list = Lists.asList("foo", "bar", new String[] {"baz"});
    checkFooBarBazList(list);
    assertTrue(list instanceof RandomAccess);
    new IteratorTester<String>(
        5, UNMODIFIABLE, asList("foo", "bar", "baz"), IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override
      protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", "bar", new String[] {"baz"}).iterator();
      }
    }.test();
  }
  @J2ktIncompatible
  @GwtIncompatible // SerializableTester
  public void testAsList2Small() {
    // Degenerate case of the three-argument overload: empty rest array yields a two-element list.
    List<String> list = Lists.asList("foo", "bar", new String[0]);
    assertThat(list).containsExactly("foo", "bar").inOrder();
    assertEquals(2, list.size());
    assertIndexIsOutOfBounds(list, -1);
    assertEquals("foo", list.get(0));
    assertEquals("bar", list.get(1));
    assertIndexIsOutOfBounds(list, 2);
    SerializableTester.reserializeAndAssert(list);
    assertTrue(list instanceof RandomAccess);
    new IteratorTester<String>(
        5, UNMODIFIABLE, asList("foo", "bar"), IteratorTester.KnownOrder.KNOWN_ORDER) {
      @Override
      protected Iterator<String> newTargetIterator() {
        return Lists.asList("foo", "bar", new String[0]).iterator();
      }
    }.test();
  }
private static void assertIndexIsOutOfBounds(List<String> list, int index) {
try {
list.get(index);
fail();
} catch (IndexOutOfBoundsException expected) {
}
}
  public void testReverseViewRandomAccess() {
    // Reverse view over a random-access backing list must write through both ways.
    List<Integer> fromList = new ArrayList<>(SOME_LIST);
    List<Integer> toList = Lists.reverse(fromList);
    assertReverseView(fromList, toList);
  }
  public void testReverseViewSequential() {
    // Reverse view over a sequential (LinkedList) backing list must write through both ways.
    List<Integer> fromList = new LinkedList<>(SOME_SEQUENTIAL_LIST);
    List<Integer> toList = Lists.reverse(fromList);
    assertReverseView(fromList, toList);
  }
  /**
   * Verifies that {@code toList} is a live reverse view of {@code fromList}: structural and
   * positional changes on either side are immediately visible, mirrored, on the other.
   * The assertion sequence is order-sensitive; each step builds on the previous list state.
   */
  private static void assertReverseView(List<Integer> fromList, List<Integer> toList) {
    /* fromList modifications reflected in toList */
    fromList.set(0, 5);
    assertEquals(asList(4, 3, 2, 5), toList);
    fromList.add(6);
    assertEquals(asList(6, 4, 3, 2, 5), toList);
    fromList.add(2, 9);
    assertEquals(asList(6, 4, 3, 9, 2, 5), toList);
    fromList.remove(Integer.valueOf(2)); // removes the value 2, not index 2
    assertEquals(asList(6, 4, 3, 9, 5), toList);
    fromList.remove(3); // removes index 3
    assertEquals(asList(6, 3, 9, 5), toList);
    /* toList modifications reflected in fromList */
    toList.remove(0);
    assertEquals(asList(5, 9, 3), fromList);
    toList.add(7);
    assertEquals(asList(7, 5, 9, 3), fromList);
    toList.add(5);
    assertEquals(asList(5, 7, 5, 9, 3), fromList);
    toList.remove(Integer.valueOf(5)); // removes the first 5 in toList order, i.e. the last in fromList
    assertEquals(asList(5, 7, 9, 3), fromList);
    toList.set(1, 8);
    assertEquals(asList(5, 7, 8, 3), fromList);
    toList.clear();
    assertEquals(emptyList(), fromList);
  }
  /** Shorthand for an immutable list of the given elements. */
  @SafeVarargs
  private static <E> List<E> list(E... elements) {
    return ImmutableList.copyOf(elements);
  }
  public void testCartesianProduct_binary1x1() {
    // Smallest non-trivial product: {1} x {2} contains the single pair (1, 2).
    assertThat(cartesianProduct(list(1), list(2))).contains(list(1, 2));
  }
  public void testCartesianProduct_binary1x2() {
    // {1} x {2, 3} enumerates pairs in order of the second factor.
    assertThat(cartesianProduct(list(1), list(2, 3)))
        .containsExactly(list(1, 2), list(1, 3))
        .inOrder();
  }
  public void testCartesianProduct_binary2x2() {
    // {1, 2} x {3, 4} enumerates pairs in row-major (first-factor-major) order.
    assertThat(cartesianProduct(list(1, 2), list(3, 4)))
        .containsExactly(list(1, 3), list(1, 4), list(2, 3), list(2, 4))
        .inOrder();
  }
  public void testCartesianProduct_2x2x2() {
    // Three binary factors: all 8 tuples, ordered like binary counting on the factors.
    assertThat(cartesianProduct(list(0, 1), list(0, 1), list(0, 1)))
        .containsExactly(
            list(0, 0, 0),
            list(0, 0, 1),
            list(0, 1, 0),
            list(0, 1, 1),
            list(1, 0, 0),
            list(1, 0, 1),
            list(1, 1, 0),
            list(1, 1, 1))
        .inOrder();
  }
public void testCartesianProduct_contains() {
List<List<Integer>> actual = cartesianProduct(list(1, 2), list(3, 4));
assertTrue(actual.contains(list(1, 3)));
assertTrue(actual.contains(list(1, 4)));
assertTrue(actual.contains(list(2, 3)));
assertTrue(actual.contains(list(2, 4)));
assertFalse(actual.contains(list(3, 1)));
}
public void testCartesianProduct_indexOf() {
List<List<Integer>> actual = cartesianProduct(list(1, 2), list(3, 4));
assertEquals(0, actual.indexOf(list(1, 3)));
assertEquals(1, actual.indexOf(list(1, 4)));
assertEquals(2, actual.indexOf(list(2, 3)));
assertEquals(3, actual.indexOf(list(2, 4)));
assertEquals(-1, actual.indexOf(list(3, 1)));
assertEquals(-1, actual.indexOf(list(1)));
assertEquals(-1, actual.indexOf(list(1, 1, 1)));
}
  public void testCartesianProduct_lastIndexOf() {
    // With a duplicated first factor {1, 1}, each pair occurs twice; lastIndexOf must report
    // the later occurrence, and non-tuples (wrong values or arity) report -1.
    List<List<Integer>> actual = cartesianProduct(list(1, 1), list(2, 3));
    assertThat(actual.lastIndexOf(list(1, 2))).isEqualTo(2);
    assertThat(actual.lastIndexOf(list(1, 3))).isEqualTo(3);
    assertThat(actual.lastIndexOf(list(1, 1))).isEqualTo(-1);
    assertThat(actual.lastIndexOf(list(1))).isEqualTo(-1);
    assertThat(actual.lastIndexOf(list(1, 1, 1))).isEqualTo(-1);
  }
  public void testCartesianProduct_unrelatedTypes() {
    // Factors of unrelated element types combine into tuples typed at their common supertype.
    List<Integer> x = list(1, 2);
    List<String> y = list("3", "4");
    List<Object> exp1 = list((Object) 1, "3");
    List<Object> exp2 = list((Object) 1, "4");
    List<Object> exp3 = list((Object) 2, "3");
    List<Object> exp4 = list((Object) 2, "4");
    assertThat(Lists.<Object>cartesianProduct(x, y))
        .containsExactly(exp1, exp2, exp3, exp4)
        .inOrder();
  }
  public void testCartesianProductTooBig() {
    // 10000^5 tuples overflows int; the product size check must reject it eagerly.
    List<String> list = nCopies(10000, "foo");
    assertThrows(
        IllegalArgumentException.class, () -> cartesianProduct(list, list, list, list, list));
  }
  public void testTransformHashCodeRandomAccess() {
    // A transformed view must honor the List.hashCode contract of its element values.
    List<String> list = transform(SOME_LIST, SOME_FUNCTION);
    assertEquals(SOME_STRING_LIST.hashCode(), list.hashCode());
  }
  public void testTransformHashCodeSequential() {
    // Same hashCode contract, over a sequential backing list.
    List<String> list = transform(SOME_SEQUENTIAL_LIST, SOME_FUNCTION);
    assertEquals(SOME_STRING_LIST.hashCode(), list.hashCode());
  }
  public void testTransformModifiableRandomAccess() {
    // Removal operations on the transformed view must work over a random-access backing list.
    List<Integer> fromList = new ArrayList<>(SOME_LIST);
    List<String> list = transform(fromList, SOME_FUNCTION);
    assertTransformModifiable(list);
  }
  public void testTransformModifiableSequential() {
    // Removal operations on the transformed view must work over a sequential backing list.
    List<Integer> fromList = new LinkedList<>(SOME_SEQUENTIAL_LIST);
    List<String> list = transform(fromList, SOME_FUNCTION);
    assertTransformModifiable(list);
  }
private static void assertTransformModifiable(List<String> list) {
try {
list.add("5");
fail("transformed list is addable");
} catch (UnsupportedOperationException expected) {
}
list.remove(0);
assertEquals(asList("2", "3", "4"), list);
list.remove("3");
assertEquals(asList("2", "4"), list);
try {
list.set(0, "5");
fail("transformed list is setable");
} catch (UnsupportedOperationException expected) {
}
list.clear();
assertEquals(emptyList(), list);
}
  public void testTransformViewRandomAccess() {
    // The transformed list is a live view over a random-access backing list.
    List<Integer> fromList = new ArrayList<>(SOME_LIST);
    List<String> toList = transform(fromList, SOME_FUNCTION);
    assertTransformView(fromList, toList);
  }
  public void testTransformViewSequential() {
    // The transformed list is a live view over a sequential backing list.
    List<Integer> fromList = new LinkedList<>(SOME_SEQUENTIAL_LIST);
    List<String> toList = transform(fromList, SOME_FUNCTION);
    assertTransformView(fromList, toList);
  }
  /**
   * Verifies that {@code toList} is a live transformed view of {@code fromList}: mutations on
   * either side are immediately visible on the other. Assertions are order-sensitive.
   */
  private static void assertTransformView(List<Integer> fromList, List<String> toList) {
    /* fromList modifications reflected in toList */
    fromList.set(0, 5);
    assertEquals(asList("5", "2", "3", "4"), toList);
    fromList.add(6);
    assertEquals(asList("5", "2", "3", "4", "6"), toList);
    fromList.remove(Integer.valueOf(2)); // removes the value 2, not index 2
    assertEquals(asList("5", "3", "4", "6"), toList);
    fromList.remove(2); // removes index 2
    assertEquals(asList("5", "3", "6"), toList);
    /* toList modifications reflected in fromList */
    toList.remove(2);
    assertEquals(asList(5, 3), fromList);
    toList.remove("5");
    assertEquals(asList(3), fromList);
    toList.clear();
    assertEquals(emptyList(), fromList);
  }
  public void testTransformRandomAccess() {
    // A transformed view of a RandomAccess list must itself be RandomAccess.
    List<String> list = transform(SOME_LIST, SOME_FUNCTION);
    assertTrue(list instanceof RandomAccess);
  }
  public void testTransformSequential() {
    // A transformed view of a sequential list must NOT claim RandomAccess.
    List<String> list = transform(SOME_SEQUENTIAL_LIST, SOME_FUNCTION);
    assertFalse(list instanceof RandomAccess);
  }
  public void testTransformRandomAccessIsNotEmpty() {
    // isEmpty() must delegate to the (non-empty) backing list, random-access case.
    List<String> transformedList = transform(SOME_LIST, SOME_FUNCTION);
    assertFalse(transformedList.isEmpty());
  }
  public void testTransformSequentialIsNotEmpty() {
    // isEmpty() must delegate to the (non-empty) backing list, sequential case.
    List<String> transformedList = transform(SOME_SEQUENTIAL_LIST, SOME_FUNCTION);
    assertFalse(transformedList.isEmpty());
  }
  public void testTransformListIteratorRandomAccess() {
    // ListIterator contract over a transformed random-access view.
    List<Integer> fromList = new ArrayList<>(SOME_LIST);
    List<String> list = transform(fromList, SOME_FUNCTION);
    assertTransformListIterator(list);
  }
  public void testTransformListIteratorSequential() {
    // ListIterator contract over a transformed sequential view.
    List<Integer> fromList = new LinkedList<>(SOME_SEQUENTIAL_LIST);
    List<String> list = transform(fromList, SOME_FUNCTION);
    assertTransformListIterator(list);
  }
  public void testTransformPreservesIOOBEsThrownByFunction() {
    // An IndexOutOfBoundsException thrown by the transformation function must propagate
    // unchanged (e.g. not be masked by the view's own bounds handling) when the view is drained.
    assertThrows(
        IndexOutOfBoundsException.class,
        () ->
            transform(
                    ImmutableList.of("foo", "bar"),
                    new Function<String, String>() {
                      @Override
                      public String apply(String input) {
                        throw new IndexOutOfBoundsException();
                      }
                    })
                .toArray());
  }
  /**
   * Walks a ListIterator over a transformed view of ["1", "2", "3", "4"], checking forward and
   * backward traversal, index reporting, end-of-list behavior, and that remove() writes through
   * while add() and set() are rejected. The call sequence is order-sensitive: remove() removes
   * "1", the element most recently returned (by the final previous() call).
   */
  private static void assertTransformListIterator(List<String> list) {
    ListIterator<String> iterator = list.listIterator(1);
    assertEquals(1, iterator.nextIndex());
    assertEquals("2", iterator.next());
    assertEquals("3", iterator.next());
    assertEquals("4", iterator.next());
    assertEquals(4, iterator.nextIndex());
    try {
      iterator.next();
      fail("did not detect end of list");
    } catch (NoSuchElementException expected) {
    }
    assertEquals(3, iterator.previousIndex());
    assertEquals("4", iterator.previous());
    assertEquals("3", iterator.previous());
    assertEquals("2", iterator.previous());
    assertTrue(iterator.hasPrevious());
    assertEquals("1", iterator.previous());
    assertFalse(iterator.hasPrevious());
    assertEquals(-1, iterator.previousIndex());
    try {
      iterator.previous();
      fail("did not detect beginning of list");
    } catch (NoSuchElementException expected) {
    }
    iterator.remove(); // removes "1", the last element returned
    assertEquals(asList("2", "3", "4"), list);
    assertFalse(list.isEmpty());
    // An UnsupportedOperationException or IllegalStateException may occur.
    try {
      iterator.add("1");
      fail("transformed list iterator is addable");
    } catch (UnsupportedOperationException | IllegalStateException expected) {
    }
    try {
      iterator.set("1");
      fail("transformed list iterator is settable");
    } catch (UnsupportedOperationException | IllegalStateException expected) {
    }
  }
  public void testTransformIteratorRandomAccess() {
    // Plain Iterator contract over a transformed random-access view.
    List<Integer> fromList = new ArrayList<>(SOME_LIST);
    List<String> list = transform(fromList, SOME_FUNCTION);
    assertTransformIterator(list);
  }
  public void testTransformIteratorSequential() {
    // Plain Iterator contract over a transformed sequential view.
    List<Integer> fromList = new LinkedList<>(SOME_SEQUENTIAL_LIST);
    List<String> list = transform(fromList, SOME_FUNCTION);
    assertTransformIterator(list);
  }
  /**
   * This test depends on the fact that {@code AbstractSequentialList.iterator} transforms the
   * {@code iterator()} call into a call on {@code listIterator(int)}. This is fine because the
   * behavior is clearly documented so it's not expected to change.
   */
  public void testTransformedSequentialIterationUsesBackingListIterationOnly() {
    // The wrapper list throws on every operation except size() and listIterator(int), so equal
    // iteration output proves the transformed view touched nothing else.
    List<Integer> randomAccessList = new ArrayList<>(SOME_SEQUENTIAL_LIST);
    List<Integer> listIteratorOnlyList = new ListIterationOnlyList<>(randomAccessList);
    List<String> transform = transform(listIteratorOnlyList, SOME_FUNCTION);
    assertTrue(elementsEqual(transform, transform(randomAccessList, SOME_FUNCTION)));
  }
  /**
   * Forwarding list that supports only {@code size()} and {@code listIterator(int)}; every other
   * operation reaches {@code delegate()} and throws. Used to prove that transformed sequential
   * iteration is implemented purely in terms of the backing list's list iterator.
   */
  private static class ListIterationOnlyList<E> extends ForwardingList<E> {
    private final List<E> realDelegate;
    private ListIterationOnlyList(List<E> realDelegate) {
      this.realDelegate = realDelegate;
    }
    @Override
    public int size() {
      return realDelegate.size();
    }
    @Override
    public ListIterator<E> listIterator(int index) {
      return realDelegate.listIterator(index);
    }
    @Override
    protected List<E> delegate() {
      // Deliberately unreachable for the allowed operations; anything else fails loudly.
      throw new UnsupportedOperationException("This list only supports ListIterator");
    }
  }
private static void assertTransformIterator(List<String> list) {
Iterator<String> iterator = list.iterator();
assertTrue(iterator.hasNext());
assertEquals("1", iterator.next());
assertTrue(iterator.hasNext());
assertEquals("2", iterator.next());
assertTrue(iterator.hasNext());
assertEquals("3", iterator.next());
assertTrue(iterator.hasNext());
assertEquals("4", iterator.next());
assertFalse(iterator.hasNext());
try {
iterator.next();
fail("did not detect end of list");
} catch (NoSuchElementException expected) {
}
iterator.remove();
assertEquals(asList("1", "2", "3"), list);
assertFalse(iterator.hasNext());
}
  public void testPartition_badSize() {
    // A chunk size of zero is invalid and must be rejected eagerly.
    List<Integer> source = singletonList(1);
    assertThrows(IllegalArgumentException.class, () -> partition(source, 0));
  }
  public void testPartition_empty() {
    // Partitioning an empty list yields no chunks at all (not one empty chunk).
    List<Integer> source = emptyList();
    List<List<Integer>> partitions = partition(source, 1);
    assertTrue(partitions.isEmpty());
    assertEquals(0, partitions.size());
  }
  public void testPartition_1_1() {
    // One element, chunk size one: exactly one single-element chunk.
    List<Integer> source = singletonList(1);
    List<List<Integer>> partitions = partition(source, 1);
    assertEquals(1, partitions.size());
    assertEquals(singletonList(1), partitions.get(0));
  }
  public void testPartition_1_2() {
    // Chunk size larger than the list: one (short) chunk with all elements.
    List<Integer> source = singletonList(1);
    List<List<Integer>> partitions = partition(source, 2);
    assertEquals(1, partitions.size());
    assertEquals(singletonList(1), partitions.get(0));
  }
  public void testPartition_2_1() {
    // Chunk size one: one chunk per element, in order.
    List<Integer> source = asList(1, 2);
    List<List<Integer>> partitions = partition(source, 1);
    assertEquals(2, partitions.size());
    assertEquals(singletonList(1), partitions.get(0));
    assertEquals(singletonList(2), partitions.get(1));
  }
public void testPartition_3_2() {
List<Integer> source = asList(1, 2, 3);
List<List<Integer>> partitions = partition(source, 2);
assertEquals(2, partitions.size());
assertEquals(asList(1, 2), partitions.get(0));
assertEquals(asList(3), partitions.get(1));
}
  @J2ktIncompatible // Arrays.asList(...).subList() doesn't implement RandomAccess in J2KT.
  @GwtIncompatible // ArrayList.subList doesn't implement RandomAccess in GWT.
  public void testPartitionRandomAccessTrue() {
    // A RandomAccess source must yield RandomAccess partitions at both the outer and inner level.
    List<Integer> source = asList(1, 2, 3);
    List<List<Integer>> partitions = partition(source, 2);
    assertTrue(
        "partition should be RandomAccess, but not: " + partitions.getClass(),
        partitions instanceof RandomAccess);
    assertTrue(
        "partition[0] should be RandomAccess, but not: " + partitions.get(0).getClass(),
        partitions.get(0) instanceof RandomAccess);
    assertTrue(
        "partition[1] should be RandomAccess, but not: " + partitions.get(1).getClass(),
        partitions.get(1) instanceof RandomAccess);
  }
public void testPartitionRandomAccessFalse() {
List<Integer> source = new LinkedList<>(asList(1, 2, 3));
List<List<Integer>> partitions = partition(source, 2);
assertFalse(partitions instanceof RandomAccess);
assertFalse(partitions.get(0) instanceof RandomAccess);
assertFalse(partitions.get(1) instanceof RandomAccess);
}
  // TODO: use the ListTestSuiteBuilder
  public void testPartition_view() {
    // partition() returns a live view: mutations of the source list before AND after a chunk is
    // retrieved are visible, and writes to a chunk flow back to the source.
    List<Integer> list = asList(1, 2, 3);
    List<List<Integer>> partitions = partition(list, 3);
    // Changes before the partition is retrieved are reflected
    list.set(0, 3);
    Iterator<List<Integer>> iterator = partitions.iterator();
    // Changes before the partition is retrieved are reflected
    list.set(1, 4);
    List<Integer> first = iterator.next();
    // Changes after are too (unlike Iterables.partition)
    list.set(2, 5);
    assertEquals(asList(3, 4, 5), first);
    // Changes to a sublist also write through to the original list
    first.set(1, 6);
    assertEquals(asList(3, 6, 5), list);
  }
  public void testPartitionSize_1() {
    // Chunk sizes near Integer.MAX_VALUE must not overflow the outer size computation.
    List<Integer> list = asList(1, 2, 3);
    assertEquals(1, partition(list, Integer.MAX_VALUE).size());
    assertEquals(1, partition(list, Integer.MAX_VALUE - 1).size());
  }
  @GwtIncompatible // cannot do such a big explicit copy
  @J2ktIncompatible // too slow
  public void testPartitionSize_2() {
    // 0x40000001 elements in chunks of 0x40000000: the one-element remainder forms a 2nd chunk.
    assertEquals(2, partition(nCopies(0x40000001, 1), 0x40000000).size());
  }
}
|
google/j2objc | 35,693 | jre_emul/android/platform/external/icu/android_icu4j/src/main/tests/android/icu/dev/test/util/CurrencyTest.java | /* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/*
**********************************************************************
* Copyright (c) 2002-2016, International Business Machines
* Corporation and others. All Rights Reserved.
**********************************************************************
* Author: Alan Liu
* Created: December 18 2002
* Since: ICU 2.4
**********************************************************************
*/
package android.icu.dev.test.util;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.junit.Test;
import android.icu.dev.test.TestFmwk;
import android.icu.dev.test.TestUtil;
import android.icu.dev.test.TestUtil.JavaVendor;
import android.icu.impl.CurrencyData;
import android.icu.text.CurrencyDisplayNames;
import android.icu.text.CurrencyMetaInfo;
import android.icu.text.CurrencyMetaInfo.CurrencyFilter;
import android.icu.text.CurrencyMetaInfo.CurrencyInfo;
import android.icu.text.DateFormat;
import android.icu.text.DecimalFormatSymbols;
import android.icu.text.SimpleDateFormat;
import android.icu.util.Currency;
import android.icu.util.GregorianCalendar;
import android.icu.util.TimeZone;
import android.icu.util.ULocale;
/**
* @test
* @summary General test of Currency
*/
public class CurrencyTest extends TestFmwk {
    /**
     * Test of basic API: equals() semantics, rejection of null and unknown currency codes,
     * getAvailableLocales(), and getName() with an invalid name-style constant.
     */
    @Test
    public void TestAPI() {
        Currency usd = Currency.getInstance("USD");
        /*int hash = */usd.hashCode();
        Currency jpy = Currency.getInstance("JPY");
        if (usd.equals(jpy)) {
            errln("FAIL: USD == JPY");
        }
        if (usd.equals("abc")) {
            errln("FAIL: USD == (String)");
        }
        if (usd.equals(null)) {
            errln("FAIL: USD == (null)");
        }
        if (!usd.equals(usd)) {
            errln("FAIL: USD != USD");
        }
        // getInstance(null) must throw NullPointerException rather than returning a currency.
        try {
            Currency nullCurrency = Currency.getInstance((String)null);
            errln("FAIL: Expected getInstance(null) to throw "
                    + "a NullPointerException, but returned " + nullCurrency);
        } catch (NullPointerException npe) {
            logln("PASS: getInstance(null) threw a NullPointerException");
        }
        // An unknown ISO code must throw IllegalArgumentException.
        try {
            Currency bogusCurrency = Currency.getInstance("BOGUS");
            errln("FAIL: Expected getInstance(\"BOGUS\") to throw "
                    + "an IllegalArgumentException, but returned " + bogusCurrency);
        } catch (IllegalArgumentException iae) {
            logln("PASS: getInstance(\"BOGUS\") threw an IllegalArgumentException");
        }
        Locale[] avail = Currency.getAvailableLocales();
        if(avail==null){
            errln("FAIL: getAvailableLocales returned null");
        }
        // Name style 5 is not a valid selector; getName must reject it.
        try {
            usd.getName(ULocale.US, 5, new boolean[1]);
            errln("expected getName with invalid type parameter to throw exception");
        }
        catch (Exception e) {
            logln("PASS: getName failed as expected");
        }
    }
    /**
     * Test registration: registering JPY as the instance for en_US and a synthetic locale
     * (fu_FU), verifying lookups and available-locale lists reflect the registration, then
     * unregistering and verifying the defaults are restored.
     */
    @Test
    public void TestRegistration() {
        final Currency jpy = Currency.getInstance("JPY");
        final Currency usd = Currency.getInstance(Locale.US);
        try {
            Currency.unregister(null); // should fail, coverage
            errln("expected unregister of null to throw exception");
        }
        catch (Exception e) {
            logln("PASS: unregister of null failed as expected");
        }
        if (Currency.unregister("")) { // coverage
            errln("unregister before register erroneously succeeded");
        }
        ULocale fu_FU = new ULocale("fu_FU");
        Object key1 = Currency.registerInstance(jpy, ULocale.US);
        Object key2 = Currency.registerInstance(jpy, fu_FU);
        Currency nus = Currency.getInstance(Locale.US);
        if (!nus.equals(jpy)) {
            errln("expected " + jpy + " but got: " + nus);
        }
        // coverage: make sure the default factory still works for an unregistered locale
        Currency nus1 = Currency.getInstance(Locale.JAPAN);
        if (!nus1.equals(jpy)) {
            errln("expected " + jpy + " but got: " + nus1);
        }
        // The synthetic locale must now appear among the available ULocales.
        ULocale[] locales = Currency.getAvailableULocales();
        boolean found = false;
        for (int i = 0; i < locales.length; ++i) {
            if (locales[i].equals(fu_FU)) {
                found = true;
                break;
            }
        }
        if (!found) {
            errln("did not find locale" + fu_FU + " in currency locales");
        }
        if (!Currency.unregister(key1)) {
            errln("unable to unregister currency using key1");
        }
        if (!Currency.unregister(key2)) {
            errln("unable to unregister currency using key2");
        }
        // After unregistering, the default (USD for en_US) must be restored...
        Currency nus2 = Currency.getInstance(Locale.US);
        if (!nus2.equals(usd)) {
            errln("expected " + usd + " but got: " + nus2);
        }
        // ...and the synthetic locale must disappear from both locale lists.
        locales = Currency.getAvailableULocales();
        found = false;
        for (int i = 0; i < locales.length; ++i) {
            if (locales[i].equals(fu_FU)) {
                found = true;
                break;
            }
        }
        if (found) {
            errln("found locale" + fu_FU + " in currency locales after unregister");
        }
        Locale[] locs = Currency.getAvailableLocales();
        found = false;
        for (int i = 0; i < locs.length; ++i) {
            if (locs[i].equals(fu_FU)) {
                found = true;
                break;
            }
        }
        if (found) {
            errln("found locale" + fu_FU + " in currency locales after unregister");
        }
    }
    /**
     * Test names: basic getName() checks for USD's symbol and long name in English.
     */
    @Test
    public void TestNames() {
        // Do a basic check of getName()
        // USD { "US$", "US Dollar" } // 04/04/1792-
        ULocale en = ULocale.ENGLISH;
        boolean[] isChoiceFormat = new boolean[1];
        Currency usd = Currency.getInstance("USD");
        // Warning: HARD-CODED LOCALE DATA in this test. If it fails, CHECK
        // THE LOCALE DATA before diving into the code.
        assertEquals("USD.getName(SYMBOL_NAME)",
                "$",
                usd.getName(en, Currency.SYMBOL_NAME, isChoiceFormat));
        assertEquals("USD.getName(LONG_NAME)",
                "US Dollar",
                usd.getName(en, Currency.LONG_NAME, isChoiceFormat));
        // TODO add more tests later
    }
    @Test
    public void testGetName_Locale_Int_String_BooleanArray() {
        // The Locale-based getName overload must agree with the ULocale-based one.
        Currency currency = Currency.getInstance(ULocale.CHINA);
        boolean[] isChoiceFormat = new boolean[1];
        int nameStyle = Currency.LONG_NAME;
        String pluralCount = "";
        String ulocaleName =
                currency.getName(ULocale.CANADA, nameStyle, pluralCount, isChoiceFormat);
        assertEquals("currency name mismatch", "Chinese Yuan", ulocaleName);
        String localeName = currency.getName(Locale.CANADA, nameStyle, pluralCount, isChoiceFormat);
        assertEquals("currency name mismatch", ulocaleName, localeName);
    }
    @Test
    public void TestCoverage() {
        // getSymbol() with no arguments uses the default locale; "$" is expected for USD here.
        Currency usd = Currency.getInstance("USD");
        assertEquals("USD.getSymbol()",
                "$",
                usd.getSymbol());
    }
    // A real test of the CurrencyDisplayNames class.
    @Test
    public void TestCurrencyDisplayNames() {
        // Verifies name/symbol/plural-name lookups both with root-locale
        // fallback ("substitute") and with noSubstitute=true.
        // Warning: hard-coded locale data below; check the data before the code.
        if (!CurrencyDisplayNames.hasData()) {
            errln("hasData() should return true.");
        }
        // with substitute
        CurrencyDisplayNames cdn = CurrencyDisplayNames.getInstance(ULocale.GERMANY);
        assertEquals("de_USD_name", "US-Dollar", cdn.getName("USD"));
        assertEquals("de_USD_symbol", "$", cdn.getSymbol("USD"));
        assertEquals("de_USD_plural_other", "US-Dollar", cdn.getPluralName("USD", "other"));
        // unknown plural category, substitute "other"
        assertEquals("de_USD_plural_foo", "US-Dollar", cdn.getPluralName("USD", "foo"));
        cdn = CurrencyDisplayNames.getInstance(ULocale.forLanguageTag("en-US"));
        assertEquals("en-US_USD_name", "US Dollar", cdn.getName("USD"));
        assertEquals("en-US_USD_symbol", "$", cdn.getSymbol("USD"));
        assertEquals("en-US_USD_plural_one", "US dollar", cdn.getPluralName("USD", "one"));
        assertEquals("en-US_USD_plural_other", "US dollars", cdn.getPluralName("USD", "other"));
        // unknown currency code: with substitute, the code itself comes back
        assertEquals("en-US_FOO_name", "FOO", cdn.getName("FOO"));
        assertEquals("en-US_FOO_symbol", "FOO", cdn.getSymbol("FOO"));
        assertEquals("en-US_FOO_plural_other", "FOO", cdn.getPluralName("FOO", "other"));
        assertEquals("en-US bundle", "en", cdn.getULocale().toString());
        // unknown locale: with substitute, falls back to the current locale's bundle
        cdn = CurrencyDisplayNames.getInstance(ULocale.forLanguageTag("zz-Gggg-YY"));
        assertEquals("bundle from current locale", "en", cdn.getULocale().toString());
        // with no substitute
        cdn = CurrencyDisplayNames.getInstance(ULocale.GERMANY, true);
        assertNotNull("have currency data for Germany", cdn);
        // known currency, behavior unchanged
        assertEquals("de_USD_name", "US-Dollar", cdn.getName("USD"));
        assertEquals("de_USD_symbol", "$", cdn.getSymbol("USD"));
        assertEquals("de_USD_plural_other", "US-Dollar", cdn.getPluralName("USD", "other"));
        // known currency but unknown plural category
        assertNull("de_USD_plural_foo", cdn.getPluralName("USD", "foo"));
        // unknown currency, get null
        assertNull("de_FOO_name", cdn.getName("FOO"));
        assertNull("de_FOO_symbol", cdn.getSymbol("FOO"));
        assertNull("de_FOO_plural_other", cdn.getPluralName("FOO", "other"));
        assertNull("de_FOO_plural_foo", cdn.getPluralName("FOO", "foo"));
        // unknown locale with no substitute
        cdn = CurrencyDisplayNames.getInstance(ULocale.forLanguageTag("zz-Gggg-YY"), true);
        String ln = "";
        if (cdn != null) {
            ln = " (" + cdn.getULocale().toString() + ")";
        }
        assertNull("no fallback from unknown locale" + ln , cdn);
        // Locale version
        cdn = CurrencyDisplayNames.getInstance(Locale.GERMANY, true);
        assertNotNull("have currency data for Germany (Java Locale)", cdn);
        assertEquals("de_USD_name (Locale)", "US-Dollar", cdn.getName("USD"));
        assertNull("de_FOO_name (Locale)", cdn.getName("FOO"));
    }
    // Coverage-only test of CurrencyData
    @Test
    public void TestCurrencyData() {
        // DefaultInfo is the "no data" provider: with fallback=true it echoes
        // the requested code back; with fallback=false it returns null (or
        // empty collections) for everything.
        CurrencyData.DefaultInfo info_fallback = (CurrencyData.DefaultInfo)CurrencyData.DefaultInfo.getWithFallback(true);
        if (info_fallback == null) {
            errln("getWithFallback() returned null.");
            return;
        }
        CurrencyData.DefaultInfo info_nofallback = (CurrencyData.DefaultInfo)CurrencyData.DefaultInfo.getWithFallback(false);
        if (info_nofallback == null) {
            errln("getWithFallback() returned null.");
            return;
        }
        // Name lookups: fallback echoes the code, no-fallback yields null.
        if (!info_fallback.getName("isoCode").equals("isoCode") || info_nofallback.getName("isoCode") != null) {
            errln("Error calling getName().");
            return;
        }
        if (!info_fallback.getPluralName("isoCode", "type").equals("isoCode") || info_nofallback.getPluralName("isoCode", "type") != null) {
            errln("Error calling getPluralName().");
            return;
        }
        if (!info_fallback.getSymbol("isoCode").equals("isoCode") || info_nofallback.getSymbol("isoCode") != null) {
            errln("Error calling getSymbol().");
            return;
        }
        // Map views of the fallback provider are empty.
        if (!info_fallback.symbolMap().isEmpty()) {
            errln("symbolMap() should return empty map.");
            return;
        }
        if (!info_fallback.nameMap().isEmpty()) {
            errln("nameMap() should return empty map.");
            return;
        }
        if (!info_fallback.getUnitPatterns().isEmpty() || info_nofallback.getUnitPatterns() != null) {
            errln("Error calling getUnitPatterns().");
            return;
        }
        if (!info_fallback.getSpacingInfo().equals((CurrencyData.CurrencySpacingInfo.DEFAULT)) ||
            info_nofallback.getSpacingInfo() != null) {
            errln("Error calling getSpacingInfo().");
            return;
        }
        // Identity comparison is intentional here: the fallback provider is
        // expected to hand back the ULocale.ROOT singleton itself.
        if (info_fallback.getULocale() != ULocale.ROOT) {
            errln("Error calling getLocale().");
            return;
        }
        if (info_fallback.getFormatInfo("isoCode") != null) {
            errln("Error calling getFormatInfo().");
            return;
        }
    }
// A real test of CurrencyMetaInfo.
@Test
public void testCurrencyMetaInfoRanges() {
CurrencyMetaInfo metainfo = CurrencyMetaInfo.getInstance(true);
assertNotNull("have metainfo", metainfo);
CurrencyFilter filter = CurrencyFilter.onRegion("DE"); // must be capitalized
List<CurrencyInfo> currenciesInGermany = metainfo.currencyInfo(filter);
logln("currencies: " + currenciesInGermany.size());
DateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS z");
fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
Date demLastDate = new Date(Long.MAX_VALUE);
Date eurFirstDate = new Date(Long.MIN_VALUE);
for (CurrencyInfo info : currenciesInGermany) {
logln(info.toString());
logln("from: " + fmt.format(info.from)+ Long.toHexString(info.from));
logln(" to: " + fmt.format(info.to) + Long.toHexString(info.to));
if (info.code.equals("DEM")) {
demLastDate = new Date(info.to);
} else if (info.code.equals("EUR")) {
eurFirstDate = new Date(info.from);
}
}
// the Euro and Deutschmark overlapped for several years
assertEquals("DEM available at last date", 2, metainfo.currencyInfo(filter.withDate(demLastDate)).size());
// demLastDate + 1 millisecond is not the start of the last day, we consider it the next day, so...
Date demLastDatePlus1ms = new Date(demLastDate.getTime() + 1);
assertEquals("DEM not available after very start of last date", 1, metainfo.currencyInfo(filter.withDate(demLastDatePlus1ms)).size());
// both available for start of euro
assertEquals("EUR available on start of first date", 2, metainfo.currencyInfo(filter.withDate(eurFirstDate)).size());
// but not one millisecond before the start of the first day
Date eurFirstDateMinus1ms = new Date(eurFirstDate.getTime() - 1);
assertEquals("EUR not avilable before very start of first date", 1, metainfo.currencyInfo(filter.withDate(eurFirstDateMinus1ms)).size());
// end time is last millisecond of day
GregorianCalendar cal = new GregorianCalendar();
cal.setTimeZone(TimeZone.getTimeZone("GMT"));
cal.setTime(demLastDate);
assertEquals("hour is 23", 23, cal.get(GregorianCalendar.HOUR_OF_DAY));
assertEquals("minute is 59", 59, cal.get(GregorianCalendar.MINUTE));
assertEquals("second is 59", 59, cal.get(GregorianCalendar.SECOND));
assertEquals("millisecond is 999", 999, cal.get(GregorianCalendar.MILLISECOND));
// start time is first millisecond of day
cal.setTime(eurFirstDate);
assertEquals("hour is 0", 0, cal.get(GregorianCalendar.HOUR_OF_DAY));
assertEquals("minute is 0", 0, cal.get(GregorianCalendar.MINUTE));
assertEquals("second is 0", 0, cal.get(GregorianCalendar.SECOND));
assertEquals("millisecond is 0", 0, cal.get(GregorianCalendar.MILLISECOND));
}
@Test
public void testCurrencyMetaInfoRangesWithLongs() {
CurrencyMetaInfo metainfo = CurrencyMetaInfo.getInstance(true);
assertNotNull("have metainfo", metainfo);
CurrencyFilter filter = CurrencyFilter.onRegion("DE"); // must be capitalized
List<CurrencyInfo> currenciesInGermany = metainfo.currencyInfo(filter);
CurrencyFilter filter_br = CurrencyFilter.onRegion("BR"); // must be capitalized
List<CurrencyInfo> currenciesInBrazil = metainfo.currencyInfo(filter_br);
logln("currencies Germany: " + currenciesInGermany.size());
logln("currencies Brazil: " + currenciesInBrazil.size());
long demFirstDate = Long.MIN_VALUE;
long demLastDate = Long.MAX_VALUE;
long eurFirstDate = Long.MIN_VALUE;
CurrencyInfo demInfo = null;
for (CurrencyInfo info : currenciesInGermany) {
logln(info.toString());
if (info.code.equals("DEM")) {
demInfo = info;
demFirstDate = info.from;
demLastDate = info.to;
} else if (info.code.equals("EUR")) {
eurFirstDate = info.from;
}
}
// the Euro and Deutschmark overlapped for several years
assertEquals("DEM available at last date", 2, metainfo.currencyInfo(filter.withDate(demLastDate)).size());
// demLastDate + 1 millisecond is not the start of the last day, we consider it the next day, so...
long demLastDatePlus1ms = demLastDate + 1;
assertEquals("DEM not available after very start of last date", 1, metainfo.currencyInfo(filter.withDate(demLastDatePlus1ms)).size());
// both available for start of euro
assertEquals("EUR available on start of first date", 2, metainfo.currencyInfo(filter.withDate(eurFirstDate)).size());
// but not one millisecond before the start of the first day
long eurFirstDateMinus1ms = eurFirstDate - 1;
assertEquals("EUR not avilable before very start of first date", 1,
metainfo.currencyInfo(filter.withDate(eurFirstDateMinus1ms)).size());
// Deutschmark available from first millisecond on
assertEquals("Millisecond of DEM Big Bang", 1,
metainfo.currencyInfo(CurrencyFilter.onDate(demFirstDate).withRegion("DE")).size());
assertEquals("From Deutschmark to Euro", 2,
metainfo.currencyInfo(CurrencyFilter.onDateRange(demFirstDate, eurFirstDate).withRegion("DE")).size());
assertEquals("all Tender for Brazil", 7,
metainfo.currencyInfo(CurrencyFilter.onTender().withRegion("BR")).size());
assertTrue("No legal tender", demInfo.isTender());
}
@Test
public void TestWithTender() {
CurrencyMetaInfo metainfo = CurrencyMetaInfo.getInstance();
if (metainfo == null) {
errln("Unable to get CurrencyMetaInfo instance.");
return;
}
CurrencyMetaInfo.CurrencyFilter filter =
CurrencyMetaInfo.CurrencyFilter.onRegion("CH");
List<String> currencies = metainfo.currencies(filter);
assertTrue("More than one currency for switzerland", currencies.size() > 1);
assertEquals(
"With tender",
Arrays.asList(new String[] {"CHF", "CHE", "CHW"}),
metainfo.currencies(filter.withTender()));
}
// Coverage-only test of the CurrencyMetaInfo class
@Test
public void TestCurrencyMetaInfo() {
CurrencyMetaInfo metainfo = CurrencyMetaInfo.getInstance();
if (metainfo == null) {
errln("Unable to get CurrencyMetaInfo instance.");
return;
}
if (!CurrencyMetaInfo.hasData()) {
errln("hasData() should note return false.");
return;
}
CurrencyMetaInfo.CurrencyFilter filter;
CurrencyMetaInfo.CurrencyInfo info;
CurrencyMetaInfo.CurrencyDigits digits;
{ // CurrencyFilter
filter = CurrencyMetaInfo.CurrencyFilter.onCurrency("currency");
CurrencyMetaInfo.CurrencyFilter filter2 = CurrencyMetaInfo.CurrencyFilter.onCurrency("test");
if (filter == null) {
errln("Unable to create CurrencyFilter.");
return;
}
if (filter.equals(new Object())) {
errln("filter should not equal to Object");
return;
}
if (filter.equals(filter2)) {
errln("filter should not equal filter2");
return;
}
if (filter.hashCode() == 0) {
errln("Error getting filter hashcode");
return;
}
if (filter.toString() == null) {
errln("Error calling toString()");
return;
}
}
{ // CurrencyInfo
info = new CurrencyMetaInfo.CurrencyInfo("region", "code", 0, 1, 1, false);
if (info == null) {
errln("Error creating CurrencyInfo.");
return;
}
if (info.toString() == null) {
errln("Error calling toString()");
return;
}
}
{ // CurrencyDigits
digits = metainfo.currencyDigits("isoCode");
if (digits == null) {
errln("Unable to get CurrencyDigits.");
return;
}
if (digits.toString() == null) {
errln("Error calling toString()");
return;
}
}
}
@Test
public void TestCurrencyKeyword() {
ULocale locale = new ULocale("th_TH@collation=traditional;currency=QQQ");
Currency currency = Currency.getInstance(locale);
String result = currency.getCurrencyCode();
if (!"QQQ".equals(result)) {
errln("got unexpected currency: " + result);
}
}
@Test
public void TestAvailableCurrencyCodes() {
String[][] tests = {
{ "eo_AM", "1950-01-05" },
{ "eo_AM", "1969-12-31", "SUR" },
{ "eo_AM", "1991-12-26", "RUR" },
{ "eo_AM", "2000-12-23", "AMD" },
{ "eo_AD", "2000-12-23", "EUR", "ESP", "FRF", "ADP" },
{ "eo_AD", "1969-12-31", "ESP", "FRF", "ADP" },
{ "eo_AD", "1950-01-05", "ESP", "ADP" },
{ "eo_AD", "1900-01-17", "ESP" },
{ "eo_UA", "1994-12-25" },
{ "eo_QQ", "1969-12-31" },
{ "eo_AO", "2000-12-23", "AOA" },
{ "eo_AO", "1995-12-25", "AOR", "AON" },
{ "eo_AO", "1990-12-26", "AON", "AOK" },
{ "eo_AO", "1979-12-29", "AOK" },
{ "eo_AO", "1969-12-31" },
{ "eo_DE@currency=DEM", "2000-12-23", "EUR", "DEM" },
{ "eo-DE-u-cu-dem", "2000-12-23", "EUR", "DEM" },
{ "en_US", null, "USD", "USN" },
{ "en_US_PREEURO", null, "USD", "USN" },
{ "en_US_Q", null, "USD", "USN" },
};
DateFormat fmt = new SimpleDateFormat("yyyy-MM-dd", Locale.US);
for (String[] test : tests) {
ULocale locale = new ULocale(test[0]);
String timeString = test[1];
Date date;
if (timeString == null) {
date = new Date();
timeString = "today";
} else {
try {
date = fmt.parse(timeString);
} catch (Exception e) {
fail("could not parse date: " + timeString);
continue;
}
}
String[] expected = null;
if (test.length > 2) {
expected = new String[test.length - 2];
System.arraycopy(test, 2, expected, 0, expected.length);
}
String[] actual = Currency.getAvailableCurrencyCodes(locale, date);
// Order is not important as of 4.4. We never documented that it was.
Set<String> expectedSet = new HashSet<String>();
if (expected != null) {
expectedSet.addAll(Arrays.asList(expected));
}
Set<String> actualSet = new HashSet<String>();
if (actual != null) {
actualSet.addAll(Arrays.asList(actual));
}
assertEquals(locale + " on " + timeString, expectedSet, actualSet);
// With Java Locale
// Note: skip this test on Java 6 or older when keywords are available
if (locale.getKeywords() == null || TestUtil.getJavaVendor() == JavaVendor.Android || TestUtil.getJavaVersion() >= 7) {
Locale javaloc = locale.toLocale();
String[] actualWithJavaLocale = Currency.getAvailableCurrencyCodes(javaloc, date);
// should be exactly same with the ULocale version
boolean same = true;
if (actual == null) {
if (actualWithJavaLocale != null) {
same = false;
}
} else {
if (actualWithJavaLocale == null || actual.length != actualWithJavaLocale.length) {
same = false;
} else {
same = true;
for (int i = 0; i < actual.length; i++) {
if (!actual[i].equals(actualWithJavaLocale[i])) {
same = false;
break;
}
}
}
}
assertTrue("getAvailableCurrencyCodes with ULocale vs Locale", same);
}
}
}
@Test
public void TestDeprecatedCurrencyFormat() {
// bug 5952
Locale locale = new Locale("sr", "QQ");
DecimalFormatSymbols icuSymbols = new
android.icu.text.DecimalFormatSymbols(locale);
String symbol = icuSymbols.getCurrencySymbol();
Currency currency = icuSymbols.getCurrency();
String expectCur = null;
String expectSym = "\u00A4";
if(!symbol.toString().equals(expectSym) || currency != null) {
errln("for " + locale + " expected " + expectSym+"/"+expectCur + " but got " + symbol+"/"+currency);
} else {
logln("for " + locale + " expected " + expectSym+"/"+expectCur + " and got " + symbol+"/"+currency);
}
}
@Test
public void TestGetKeywordValues(){
final String[][] PREFERRED = {
{"root", },
{"und", },
{"und_ZZ", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XDR", "XPD", "XPT", "XSU", "XTS", "XUA", "XXX"},
{"en_US", "USD", "USN"},
{"en_029", },
{"en_TH", "THB"},
{"de", "EUR"},
{"de_DE", "EUR"},
{"de_ZZ", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XDR", "XPD", "XPT", "XSU", "XTS", "XUA", "XXX"},
{"ar", "EGP"},
{"ar_PS", "ILS", "JOD"},
{"en@currency=CAD", "USD", "USN"},
{"fr@currency=ZZZ", "EUR"},
{"de_DE@currency=DEM", "EUR"},
{"en_US@rg=THZZZZ", "THB"},
{"de@rg=USZZZZ", "USD", "USN"},
{"en_US@currency=CAD;rg=THZZZZ", "THB"},
};
String[] ALL = Currency.getKeywordValuesForLocale("currency", ULocale.getDefault(), false);
HashSet ALLSET = new HashSet();
for (int i = 0; i < ALL.length; i++) {
ALLSET.add(ALL[i]);
}
for (int i = 0; i < PREFERRED.length; i++) {
ULocale loc = new ULocale(PREFERRED[i][0]);
String[] expected = new String[PREFERRED[i].length - 1];
System.arraycopy(PREFERRED[i], 1, expected, 0, expected.length);
String[] pref = Currency.getKeywordValuesForLocale("currency", loc, true);
assertEquals(loc.toString(), expected, pref);
String[] all = Currency.getKeywordValuesForLocale("currency", loc, false);
// The items in the two collections should match (ignore order,
// behavior change from 4.3.3)
Set<String> returnedSet = new HashSet<String>();
returnedSet.addAll(Arrays.asList(all));
assertEquals(loc.toString(), ALLSET, returnedSet);
}
}
@Test
public void TestIsAvailable() {
Date d1995 = new Date(788918400000L); // 1995-01-01 00:00 GMT
Date d2000 = new Date(946684800000L); // 2000-01-01 00:00 GMT
Date d2005 = new Date(1104537600000L); // 2005-01-01 00:00 GMT
assertTrue("USD all time", Currency.isAvailable("USD", null, null));
assertTrue("USD before 1995", Currency.isAvailable("USD", null, d1995));
assertTrue("USD 1995-2005", Currency.isAvailable("USD", d1995, d2005));
assertTrue("USD after 2005", Currency.isAvailable("USD", d2005, null));
assertTrue("USD on 2005-01-01", Currency.isAvailable("USD", d2005, d2005));
assertTrue("usd all time", Currency.isAvailable("usd", null, null));
assertTrue("DEM all time", Currency.isAvailable("DEM", null, null));
assertTrue("DEM before 1995", Currency.isAvailable("DEM", null, d1995));
assertTrue("DEM 1995-2000", Currency.isAvailable("DEM", d1995, d2000));
assertTrue("DEM 1995-2005", Currency.isAvailable("DEM", d1995, d2005));
assertFalse("DEM after 2005", Currency.isAvailable("DEM", d2005, null));
assertTrue("DEM on 2000-01-01", Currency.isAvailable("DEM", d2000, d2000));
assertFalse("DEM on 2005-01-01", Currency.isAvailable("DEM", d2005, d2005));
assertTrue("CHE all the time", Currency.isAvailable("CHE", null, null));
assertFalse("XXY unknown code", Currency.isAvailable("XXY", null, null));
assertFalse("USDOLLAR invalid code", Currency.isAvailable("USDOLLAR", null, null));
// illegal argument combination
try {
Currency.isAvailable("USD", d2005, d1995);
errln("Expected IllegalArgumentException, because lower range is after upper range");
} catch (IllegalArgumentException e) {
logln("IllegalArgumentException, because lower range is after upper range");
}
}
/**
* Test case for getAvailableCurrencies()
*/
@Test
public void TestGetAvailableCurrencies() {
Set<Currency> avail1 = Currency.getAvailableCurrencies();
// returned set must be modifiable - add one more currency
avail1.add(Currency.getInstance("ZZZ")); // ZZZ is not defined by ISO 4217
Set<Currency> avail2 = Currency.getAvailableCurrencies();
assertTrue("avail1 does not contain all currencies in avail2", avail1.containsAll(avail2));
assertTrue("avail1 must have one more currency", (avail1.size() - avail2.size() == 1));
}
/**
* Test case for getNumericCode()
*/
@Test
public void TestGetNumericCode() {
final Object[][] NUMCODE_TESTDATA = {
{"USD", 840},
{"Usd", 840}, /* mixed casing */
{"EUR", 978},
{"JPY", 392},
{"XFU", 0}, /* XFU: no numeric code */
{"ZZZ", 0}, /* ZZZ: undefined ISO currency code */
};
for (Object[] data : NUMCODE_TESTDATA) {
Currency cur = Currency.getInstance((String)data[0]);
int numCode = cur.getNumericCode();
int expected = ((Integer)data[1]).intValue();
if (numCode != expected) {
errln("FAIL: getNumericCode returned " + numCode + " for "
+ cur.getCurrencyCode() + " - expected: " + expected);
}
}
}
/**
* Test case for getDisplayName()
*/
@Test
public void TestGetDisplayName() {
final String[][] DISPNAME_TESTDATA = {
{"USD", "US Dollar"},
{"EUR", "Euro"},
{"JPY", "Japanese Yen"},
};
Locale defLocale = Locale.getDefault();
Locale jaJP = new Locale("ja", "JP");
Locale root = new Locale("");
for (String[] data : DISPNAME_TESTDATA) {
Currency cur = Currency.getInstance(data[0]);
assertEquals("getDisplayName() for " + data[0], data[1], cur.getDisplayName());
assertEquals("getDisplayName() for " + data[0] + " in locale " + defLocale, data[1], cur.getDisplayName(defLocale));
// ICU has localized display name for ja
assertNotEquals("getDisplayName() for " + data[0] + " in locale " + jaJP, data[1], cur.getDisplayName(jaJP));
// root locale does not have any localized display names,
// so the currency code itself should be returned
assertEquals("getDisplayName() for " + data[0] + " in locale " + root, data[0], cur.getDisplayName(root));
}
}
    @Test
    public void TestCurrencyInfoCtor() {
        // Coverage: just exercise the 5-argument CurrencyInfo constructor;
        // the created instance is intentionally discarded.
        new CurrencyMetaInfo.CurrencyInfo("region", "code", 0, 0, 1);
    }
    /**
     * Class CurrencyMetaInfo has methods which are overwritten by its derived classes.
     * A derived class is defined here for the purpose of testing these methods.
     * Since the creator of CurrencyMetaInfo is defined as 'protected', no instance of
     * this class can be created directly.
     */
    public class TestCurrencyMetaInfo extends CurrencyMetaInfo {
    }

    // Shared instance used by TestCurrMetaInfoBaseClass to reach the base
    // class's default method implementations.
    final TestCurrencyMetaInfo tcurrMetaInfo = new TestCurrencyMetaInfo();
/*
*
* Test methods of base class CurrencyMetaInfo. ICU4J only creates subclasses,
* never an instance of the base class.
*/
@Test
public void TestCurrMetaInfoBaseClass() {
CurrencyFilter usFilter = CurrencyFilter.onRegion("US");
assertEquals("Empty list expected", 0, tcurrMetaInfo.currencyInfo(usFilter).size());
assertEquals("Empty list expected", 0, tcurrMetaInfo.currencies(usFilter).size());
assertEquals("Empty list expected", 0, tcurrMetaInfo.regions(usFilter).size());
assertEquals("Iso format for digits expected",
"CurrencyDigits(fractionDigits='2',roundingIncrement='0')",
tcurrMetaInfo.currencyDigits("isoCode").toString());
}
/**
* Test cases for rounding and fractions.
*/
@Test
public void testGetDefaultFractionDigits_CurrencyUsage() {
Currency currency = Currency.getInstance(ULocale.CHINA);
int cashFractionDigits = currency.getDefaultFractionDigits(Currency.CurrencyUsage.CASH);
assertEquals("number of digits in fraction incorrect", 2, cashFractionDigits);
}
@Test
public void testGetRoundingIncrement() {
Currency currency = Currency.getInstance(ULocale.JAPAN);
// It appears as though this always returns 0 irrespective of the currency.
double roundingIncrement = currency.getRoundingIncrement();
assertEquals("Rounding increment not zero", 0.0, roundingIncrement, 0.0);
}
@Test
public void testGetRoundingIncrement_CurrencyUsage() {
Currency currency = Currency.getInstance(ULocale.JAPAN);
// It appears as though this always returns 0 irrespective of the currency or usage.
double roundingIncrement = currency.getRoundingIncrement(Currency.CurrencyUsage.CASH);
// TODO: replace the JUnit import with TestFmwk assertEquals.
assertEquals("Rounding increment not zero", 0.0, roundingIncrement, 0.0);
}
    @Test
    public void TestCurrencyDataCtor() throws Exception {
        // CurrencyData is a static holder class; verify its constructor is
        // private (and exercise it reflectively for coverage).
        checkDefaultPrivateConstructor(CurrencyData.class);
    }
}
|
googleapis/google-cloud-java | 36,233 | java-container/proto-google-cloud-container-v1beta1/src/main/java/com/google/container/v1beta1/LoggingComponentConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1beta1/cluster_service.proto
// Protobuf Java Version: 3.25.8
package com.google.container.v1beta1;
/**
*
*
* <pre>
* LoggingComponentConfig is cluster logging component configuration.
* </pre>
*
* Protobuf type {@code google.container.v1beta1.LoggingComponentConfig}
*/
public final class LoggingComponentConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.container.v1beta1.LoggingComponentConfig)
LoggingComponentConfigOrBuilder {
private static final long serialVersionUID = 0L;
  // Use LoggingComponentConfig.newBuilder() to construct.
  private LoggingComponentConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg form initializes the repeated field to an immutable empty list.
  private LoggingComponentConfig() {
    enableComponents_ = java.util.Collections.emptyList();
  }

  // Instance factory invoked by the generated-message runtime; the parameter
  // only exists to keep this overload distinct.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new LoggingComponentConfig();
  }
  // Descriptor plumbing tying this message class to the metadata generated
  // from google/container/v1beta1/cluster_service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_LoggingComponentConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.container.v1beta1.ClusterServiceProto
        .internal_static_google_container_v1beta1_LoggingComponentConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.container.v1beta1.LoggingComponentConfig.class,
            com.google.container.v1beta1.LoggingComponentConfig.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * GKE components exposing logs
   * </pre>
   *
   * Protobuf enum {@code google.container.v1beta1.LoggingComponentConfig.Component}
   */
  public enum Component implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Default value. This shouldn't be used.
     * </pre>
     *
     * <code>COMPONENT_UNSPECIFIED = 0;</code>
     */
    COMPONENT_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * system components
     * </pre>
     *
     * <code>SYSTEM_COMPONENTS = 1;</code>
     */
    SYSTEM_COMPONENTS(1),
    /**
     *
     *
     * <pre>
     * workloads
     * </pre>
     *
     * <code>WORKLOADS = 2;</code>
     */
    WORKLOADS(2),
    /**
     *
     *
     * <pre>
     * kube-apiserver
     * </pre>
     *
     * <code>APISERVER = 3;</code>
     */
    APISERVER(3),
    /**
     *
     *
     * <pre>
     * kube-scheduler
     * </pre>
     *
     * <code>SCHEDULER = 4;</code>
     */
    SCHEDULER(4),
    /**
     *
     *
     * <pre>
     * kube-controller-manager
     * </pre>
     *
     * <code>CONTROLLER_MANAGER = 5;</code>
     */
    CONTROLLER_MANAGER(5),
    // NOTE(review): enum number 6 is skipped below — presumably reserved or
    // retired in cluster_service.proto; confirm against the .proto source.
    /**
     *
     *
     * <pre>
     * kcp-sshd
     * </pre>
     *
     * <code>KCP_SSHD = 7;</code>
     */
    KCP_SSHD(7),
    /**
     *
     *
     * <pre>
     * kcp connection logs
     * </pre>
     *
     * <code>KCP_CONNECTION = 8;</code>
     */
    KCP_CONNECTION(8),
    /**
     *
     *
     * <pre>
     * horizontal pod autoscaler decision logs
     * </pre>
     *
     * <code>KCP_HPA = 9;</code>
     */
    KCP_HPA(9),
    // Sentinel for wire values unknown to this generated code; it carries no
    // valid number or descriptor (see getNumber()/getValueDescriptor()).
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Default value. This shouldn't be used.
     * </pre>
     *
     * <code>COMPONENT_UNSPECIFIED = 0;</code>
     */
    public static final int COMPONENT_UNSPECIFIED_VALUE = 0;

    /**
     *
     *
     * <pre>
     * system components
     * </pre>
     *
     * <code>SYSTEM_COMPONENTS = 1;</code>
     */
    public static final int SYSTEM_COMPONENTS_VALUE = 1;

    /**
     *
     *
     * <pre>
     * workloads
     * </pre>
     *
     * <code>WORKLOADS = 2;</code>
     */
    public static final int WORKLOADS_VALUE = 2;

    /**
     *
     *
     * <pre>
     * kube-apiserver
     * </pre>
     *
     * <code>APISERVER = 3;</code>
     */
    public static final int APISERVER_VALUE = 3;

    /**
     *
     *
     * <pre>
     * kube-scheduler
     * </pre>
     *
     * <code>SCHEDULER = 4;</code>
     */
    public static final int SCHEDULER_VALUE = 4;

    /**
     *
     *
     * <pre>
     * kube-controller-manager
     * </pre>
     *
     * <code>CONTROLLER_MANAGER = 5;</code>
     */
    public static final int CONTROLLER_MANAGER_VALUE = 5;

    /**
     *
     *
     * <pre>
     * kcp-sshd
     * </pre>
     *
     * <code>KCP_SSHD = 7;</code>
     */
    public static final int KCP_SSHD_VALUE = 7;

    /**
     *
     *
     * <pre>
     * kcp connection logs
     * </pre>
     *
     * <code>KCP_CONNECTION = 8;</code>
     */
    public static final int KCP_CONNECTION_VALUE = 8;

    /**
     *
     *
     * <pre>
     * horizontal pod autoscaler decision logs
     * </pre>
     *
     * <code>KCP_HPA = 9;</code>
     */
    public static final int KCP_HPA_VALUE = 9;

    // Returns the .proto-declared number; UNRECOGNIZED has none by design.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Component valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static Component forNumber(int value) {
      switch (value) {
        case 0:
          return COMPONENT_UNSPECIFIED;
        case 1:
          return SYSTEM_COMPONENTS;
        case 2:
          return WORKLOADS;
        case 3:
          return APISERVER;
        case 4:
          return SCHEDULER;
        case 5:
          return CONTROLLER_MANAGER;
        case 7:
          return KCP_SSHD;
        case 8:
          return KCP_CONNECTION;
        case 9:
          return KCP_HPA;
        default:
          // Unknown wire value (including the skipped 6): caller maps to UNRECOGNIZED.
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<Component> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<Component> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Component>() {
          public Component findValueByNumber(int number) {
            return Component.forNumber(number);
          }
        };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.container.v1beta1.LoggingComponentConfig.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    // Cached copy of values(); values() allocates a new array on every call.
    private static final Component[] VALUES = values();

    public static Component valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // The .proto-declared numeric value for this constant (-1 for UNRECOGNIZED).
    private final int value;

    private Component(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.container.v1beta1.LoggingComponentConfig.Component)
  }
  public static final int ENABLE_COMPONENTS_FIELD_NUMBER = 1;

  // Repeated enums are stored as their raw wire integers; the converter below
  // maps them to Component constants on access, substituting UNRECOGNIZED for
  // numbers this generated code does not know.
  @SuppressWarnings("serial")
  private java.util.List<java.lang.Integer> enableComponents_;

  private static final com.google.protobuf.Internal.ListAdapter.Converter<
          java.lang.Integer, com.google.container.v1beta1.LoggingComponentConfig.Component>
      enableComponents_converter_ =
          new com.google.protobuf.Internal.ListAdapter.Converter<
              java.lang.Integer, com.google.container.v1beta1.LoggingComponentConfig.Component>() {
            public com.google.container.v1beta1.LoggingComponentConfig.Component convert(
                java.lang.Integer from) {
              com.google.container.v1beta1.LoggingComponentConfig.Component result =
                  com.google.container.v1beta1.LoggingComponentConfig.Component.forNumber(from);
              return result == null
                  ? com.google.container.v1beta1.LoggingComponentConfig.Component.UNRECOGNIZED
                  : result;
            }
          };
  /**
   *
   *
   * <pre>
   * Select components to collect logs. An empty set would disable all logging.
   * </pre>
   *
   * <code>
   * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
   * </code>
   *
   * @return A list containing the enableComponents.
   */
  @java.lang.Override
  public java.util.List<com.google.container.v1beta1.LoggingComponentConfig.Component>
      getEnableComponentsList() {
    // Lazily-converting view over the raw integer list; no copy is made.
    return new com.google.protobuf.Internal.ListAdapter<
        java.lang.Integer, com.google.container.v1beta1.LoggingComponentConfig.Component>(
        enableComponents_, enableComponents_converter_);
  }

  /**
   *
   *
   * <pre>
   * Select components to collect logs. An empty set would disable all logging.
   * </pre>
   *
   * <code>
   * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
   * </code>
   *
   * @return The count of enableComponents.
   */
  @java.lang.Override
  public int getEnableComponentsCount() {
    return enableComponents_.size();
  }

  /**
   *
   *
   * <pre>
   * Select components to collect logs. An empty set would disable all logging.
   * </pre>
   *
   * <code>
   * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
   * </code>
   *
   * @param index The index of the element to return.
   * @return The enableComponents at the given index.
   */
  @java.lang.Override
  public com.google.container.v1beta1.LoggingComponentConfig.Component getEnableComponents(
      int index) {
    return enableComponents_converter_.convert(enableComponents_.get(index));
  }

  /**
   *
   *
   * <pre>
   * Select components to collect logs. An empty set would disable all logging.
   * </pre>
   *
   * <code>
   * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
   * </code>
   *
   * @return A list containing the enum numeric values on the wire for enableComponents.
   */
  @java.lang.Override
  public java.util.List<java.lang.Integer> getEnableComponentsValueList() {
    // Raw wire integers, exposed without conversion.
    return enableComponents_;
  }

  /**
   *
   *
   * <pre>
   * Select components to collect logs. An empty set would disable all logging.
   * </pre>
   *
   * <code>
   * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
   * </code>
   *
   * @param index The index of the value to return.
   * @return The enum numeric value on the wire of enableComponents at the given index.
   */
  @java.lang.Override
  public int getEnableComponentsValue(int index) {
    return enableComponents_.get(index);
  }
// Cached byte size of the packed enable_components payload; written by
// getSerializedSize() and read back by writeTo().
private int enableComponentsMemoizedSerializedSize;
// Tri-state initialization cache: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Must run first: populates enableComponentsMemoizedSerializedSize used below.
getSerializedSize();
if (getEnableComponentsList().size() > 0) {
// Packed repeated enum encoding: tag (field 1, wire type 2) then payload length.
output.writeUInt32NoTag(10);
output.writeUInt32NoTag(enableComponentsMemoizedSerializedSize);
}
for (int i = 0; i < enableComponents_.size(); i++) {
output.writeEnumNoTag(enableComponents_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize caches the result; -1 means not yet computed.
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < enableComponents_.size(); i++) {
dataSize +=
com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(enableComponents_.get(i));
}
size += dataSize;
if (!getEnableComponentsList().isEmpty()) {
// One byte for the tag plus the varint length prefix of the packed payload.
size += 1;
size += com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(dataSize);
}
// Remember the payload size for writeTo().
enableComponentsMemoizedSerializedSize = dataSize;
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.container.v1beta1.LoggingComponentConfig)) {
return super.equals(obj);
}
com.google.container.v1beta1.LoggingComponentConfig other =
(com.google.container.v1beta1.LoggingComponentConfig) obj;
// Compares the raw wire-integer lists, then unknown fields.
if (!enableComponents_.equals(other.enableComponents_)) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode caches the result; 0 means not yet computed.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEnableComponentsCount() > 0) {
hash = (37 * hash) + ENABLE_COMPONENTS_FIELD_NUMBER;
hash = (53 * hash) + enableComponents_.hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Standard generated parseFrom overloads: each delegates to the static
// --- PARSER (or the GeneratedMessageV3 IO helpers for stream variants). ---
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.container.v1beta1.LoggingComponentConfig parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// --- Builder factory methods. ---
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.container.v1beta1.LoggingComponentConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// Avoid a needless mergeFrom(this) when called on the default instance.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * LoggingComponentConfig is cluster logging component configuration.
 * </pre>
 *
 * Protobuf type {@code google.container.v1beta1.LoggingComponentConfig}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.container.v1beta1.LoggingComponentConfig)
com.google.container.v1beta1.LoggingComponentConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_LoggingComponentConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_LoggingComponentConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.container.v1beta1.LoggingComponentConfig.class,
com.google.container.v1beta1.LoggingComponentConfig.Builder.class);
}
// Construct using com.google.container.v1beta1.LoggingComponentConfig.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
// Reset the repeated field to the shared immutable empty list and clear its
// "is mutable" bit (0x00000001).
enableComponents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.container.v1beta1.ClusterServiceProto
.internal_static_google_container_v1beta1_LoggingComponentConfig_descriptor;
}
@java.lang.Override
public com.google.container.v1beta1.LoggingComponentConfig getDefaultInstanceForType() {
return com.google.container.v1beta1.LoggingComponentConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.container.v1beta1.LoggingComponentConfig build() {
com.google.container.v1beta1.LoggingComponentConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.container.v1beta1.LoggingComponentConfig buildPartial() {
com.google.container.v1beta1.LoggingComponentConfig result =
new com.google.container.v1beta1.LoggingComponentConfig(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.container.v1beta1.LoggingComponentConfig result) {
// If the builder owns a mutable list, freeze it before handing it to the
// immutable message, and drop the mutability bit.
if (((bitField0_ & 0x00000001) != 0)) {
enableComponents_ = java.util.Collections.unmodifiableList(enableComponents_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.enableComponents_ = enableComponents_;
}
private void buildPartial0(com.google.container.v1beta1.LoggingComponentConfig result) {
// Generated placeholder: this message has no singular fields to copy.
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.container.v1beta1.LoggingComponentConfig) {
return mergeFrom((com.google.container.v1beta1.LoggingComponentConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.container.v1beta1.LoggingComponentConfig other) {
if (other == com.google.container.v1beta1.LoggingComponentConfig.getDefaultInstance())
return this;
if (!other.enableComponents_.isEmpty()) {
if (enableComponents_.isEmpty()) {
// Adopt the other message's (immutable) list directly; clear the
// mutability bit since we do not own it.
enableComponents_ = other.enableComponents_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEnableComponentsIsMutable();
enableComponents_.addAll(other.enableComponents_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
// Field 1 may arrive unpacked (tag 8: one varint per element) ...
case 8:
{
int tmpRaw = input.readEnum();
ensureEnableComponentsIsMutable();
enableComponents_.add(tmpRaw);
break;
} // case 8
// ... or packed (tag 10: length-delimited run of varints).
case 10:
{
int length = input.readRawVarint32();
int oldLimit = input.pushLimit(length);
while (input.getBytesUntilLimit() > 0) {
int tmpRaw = input.readEnum();
ensureEnableComponentsIsMutable();
enableComponents_.add(tmpRaw);
}
input.popLimit(oldLimit);
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<java.lang.Integer> enableComponents_ = java.util.Collections.emptyList();
// Copy-on-write: replace the shared/borrowed list with a private ArrayList the
// first time a mutation is requested, and record ownership in bit 0x00000001.
private void ensureEnableComponentsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
enableComponents_ = new java.util.ArrayList<java.lang.Integer>(enableComponents_);
bitField0_ |= 0x00000001;
}
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @return A list containing the enableComponents.
 */
public java.util.List<com.google.container.v1beta1.LoggingComponentConfig.Component>
getEnableComponentsList() {
return new com.google.protobuf.Internal.ListAdapter<
java.lang.Integer, com.google.container.v1beta1.LoggingComponentConfig.Component>(
enableComponents_, enableComponents_converter_);
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @return The count of enableComponents.
 */
public int getEnableComponentsCount() {
return enableComponents_.size();
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param index The index of the element to return.
 * @return The enableComponents at the given index.
 */
public com.google.container.v1beta1.LoggingComponentConfig.Component getEnableComponents(
int index) {
return enableComponents_converter_.convert(enableComponents_.get(index));
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param index The index to set the value at.
 * @param value The enableComponents to set.
 * @return This builder for chaining.
 */
public Builder setEnableComponents(
int index, com.google.container.v1beta1.LoggingComponentConfig.Component value) {
if (value == null) {
throw new NullPointerException();
}
ensureEnableComponentsIsMutable();
enableComponents_.set(index, value.getNumber());
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param value The enableComponents to add.
 * @return This builder for chaining.
 */
public Builder addEnableComponents(
com.google.container.v1beta1.LoggingComponentConfig.Component value) {
if (value == null) {
throw new NullPointerException();
}
ensureEnableComponentsIsMutable();
enableComponents_.add(value.getNumber());
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param values The enableComponents to add.
 * @return This builder for chaining.
 */
public Builder addAllEnableComponents(
java.lang.Iterable<? extends com.google.container.v1beta1.LoggingComponentConfig.Component>
values) {
ensureEnableComponentsIsMutable();
for (com.google.container.v1beta1.LoggingComponentConfig.Component value : values) {
enableComponents_.add(value.getNumber());
}
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearEnableComponents() {
enableComponents_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @return A list containing the enum numeric values on the wire for enableComponents.
 */
public java.util.List<java.lang.Integer> getEnableComponentsValueList() {
return java.util.Collections.unmodifiableList(enableComponents_);
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param index The index of the value to return.
 * @return The enum numeric value on the wire of enableComponents at the given index.
 */
public int getEnableComponentsValue(int index) {
return enableComponents_.get(index);
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param index The index to set the value at.
 * @param value The enum numeric value on the wire for enableComponents to set.
 * @return This builder for chaining.
 */
public Builder setEnableComponentsValue(int index, int value) {
ensureEnableComponentsIsMutable();
enableComponents_.set(index, value);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param value The enum numeric value on the wire for enableComponents to add.
 * @return This builder for chaining.
 */
public Builder addEnableComponentsValue(int value) {
ensureEnableComponentsIsMutable();
enableComponents_.add(value);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Select components to collect logs. An empty set would disable all logging.
 * </pre>
 *
 * <code>
 * repeated .google.container.v1beta1.LoggingComponentConfig.Component enable_components = 1;
 * </code>
 *
 * @param values The enum numeric values on the wire for enableComponents to add.
 * @return This builder for chaining.
 */
public Builder addAllEnableComponentsValue(java.lang.Iterable<java.lang.Integer> values) {
ensureEnableComponentsIsMutable();
for (int value : values) {
enableComponents_.add(value);
}
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.container.v1beta1.LoggingComponentConfig)
}
// @@protoc_insertion_point(class_scope:google.container.v1beta1.LoggingComponentConfig)
// Shared immutable default instance, created once at class-load time.
private static final com.google.container.v1beta1.LoggingComponentConfig DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.container.v1beta1.LoggingComponentConfig();
}
public static com.google.container.v1beta1.LoggingComponentConfig getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser implementation: delegates wire decoding to Builder.mergeFrom and
// surfaces partial results on failure via setUnfinishedMessage.
private static final com.google.protobuf.Parser<LoggingComponentConfig> PARSER =
new com.google.protobuf.AbstractParser<LoggingComponentConfig>() {
@java.lang.Override
public LoggingComponentConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<LoggingComponentConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<LoggingComponentConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.container.v1beta1.LoggingComponentConfig getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/fineract | 36,475 | fineract-charge/src/main/java/org/apache/fineract/portfolio/charge/domain/Charge.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.charge.domain;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.persistence.UniqueConstraint;
import java.math.BigDecimal;
import java.time.MonthDay;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import org.apache.fineract.accounting.glaccount.data.GLAccountData;
import org.apache.fineract.accounting.glaccount.domain.GLAccount;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.data.ApiParameterError;
import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder;
import org.apache.fineract.infrastructure.core.data.EnumOptionData;
import org.apache.fineract.infrastructure.core.domain.AbstractPersistableCustom;
import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import org.apache.fineract.infrastructure.core.service.DateUtils;
import org.apache.fineract.organisation.monetary.data.CurrencyData;
import org.apache.fineract.portfolio.charge.api.ChargesApiConstants;
import org.apache.fineract.portfolio.charge.data.ChargeData;
import org.apache.fineract.portfolio.charge.exception.ChargeDueAtDisbursementCannotBePenaltyException;
import org.apache.fineract.portfolio.charge.exception.ChargeMustBePenaltyException;
import org.apache.fineract.portfolio.charge.exception.ChargeParameterUpdateNotSupportedException;
import org.apache.fineract.portfolio.charge.service.ChargeEnumerations;
import org.apache.fineract.portfolio.common.domain.PeriodFrequencyType;
import org.apache.fineract.portfolio.paymenttype.data.PaymentTypeData;
import org.apache.fineract.portfolio.paymenttype.domain.PaymentType;
import org.apache.fineract.portfolio.tax.data.TaxGroupData;
import org.apache.fineract.portfolio.tax.domain.TaxGroup;
@Entity
@Table(name = "m_charge", uniqueConstraints = { @UniqueConstraint(columnNames = { "name" }, name = "name") })
public class Charge extends AbstractPersistableCustom<Long> {
// Display name of the charge; unique per the table constraint on "name".
@Column(name = "name", length = 100)
private String name;
// Flat amount or percentage value, depending on chargeCalculation.
@Column(name = "amount", scale = 6, precision = 19, nullable = false)
private BigDecimal amount;
@Column(name = "currency_code", length = 3)
private String currencyCode;
// Stored as the integer value of the ChargeAppliesTo enum (loan/savings/client).
@Column(name = "charge_applies_to_enum", nullable = false)
private Integer chargeAppliesTo;
// Stored as the integer value of the ChargeTimeType enum.
@Column(name = "charge_time_enum", nullable = false)
private Integer chargeTimeType;
// Stored as the integer value of the ChargeCalculationType enum.
@Column(name = "charge_calculation_enum")
private Integer chargeCalculation;
// Stored as the integer value of the ChargePaymentMode enum; may be null.
@Column(name = "charge_payment_mode_enum", nullable = true)
private Integer chargePaymentMode;
// Day/month pair for monthly/annual fees (set from a MonthDay in the constructor).
@Column(name = "fee_on_day", nullable = true)
private Integer feeOnDay;
@Column(name = "fee_interval", nullable = true)
private Integer feeInterval;
@Column(name = "fee_on_month", nullable = true)
private Integer feeOnMonth;
@Column(name = "is_penalty", nullable = false)
private boolean penalty;
@Column(name = "is_active", nullable = false)
private boolean active;
// Soft-delete flag; rows are never physically removed.
@Column(name = "is_deleted", nullable = false)
private boolean deleted = false;
// Caps applied only for percentage-of-disbursement/approved-amount charges.
@Column(name = "min_cap", scale = 6, precision = 19, nullable = true)
private BigDecimal minCap;
@Column(name = "max_cap", scale = 6, precision = 19, nullable = true)
private BigDecimal maxCap;
@Column(name = "fee_frequency", nullable = true)
private Integer feeFrequency;
// Free-withdrawal settings (savings charges only; see constructor).
@Column(name = "is_free_withdrawal", nullable = false)
private boolean enableFreeWithdrawal;
@Column(name = "free_withdrawal_charge_frequency", nullable = true)
private Integer freeWithdrawalFrequency;
@Column(name = "restart_frequency", nullable = true)
private Integer restartFrequency;
// Stored as the integer value of a PeriodFrequencyType.
@Column(name = "restart_frequency_enum", nullable = true)
private Integer restartFrequencyEnum;
@Column(name = "is_payment_type", nullable = false)
private boolean enablePaymentType;
@ManyToOne
@JoinColumn(name = "payment_type_id", nullable = false)
private PaymentType paymentType;
// GL account the charge income/liability is posted to; lazily loaded.
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "income_or_liability_account_id")
private GLAccount account;
@ManyToOne
@JoinColumn(name = "tax_group_id")
private TaxGroup taxGroup;
/**
 * Static factory: builds a {@code Charge} from the JSON of a create-charge command.
 *
 * <p>Free-withdrawal sub-parameters ({@code freeWithdrawalFrequency},
 * {@code restartCountFrequency}, {@code countFrequencyType}) are read only when
 * {@code enableFreeWithdrawalCharge} is true; otherwise they stay {@code null}.
 *
 * @param command     the parsed JSON command to read parameters from
 * @param account     GL account to post charge income/liability to (may be null)
 * @param taxGroup    tax group applied to this charge (may be null)
 * @param paymentType payment type association, used when {@code enablePaymentType} is set
 * @return a new, validated {@code Charge} (the private constructor performs validation)
 */
public static Charge fromJson(final JsonCommand command, final GLAccount account, final TaxGroup taxGroup,
final PaymentType paymentType) {
final String name = command.stringValueOfParameterNamed("name");
final BigDecimal amount = command.bigDecimalValueOfParameterNamed("amount");
final String currencyCode = command.stringValueOfParameterNamed("currencyCode");
final ChargeAppliesTo chargeAppliesTo = ChargeAppliesTo.fromInt(command.integerValueOfParameterNamed("chargeAppliesTo"));
final ChargeTimeType chargeTimeType = ChargeTimeType.fromInt(command.integerValueOfParameterNamed("chargeTimeType"));
final ChargeCalculationType chargeCalculationType = ChargeCalculationType
.fromInt(command.integerValueOfParameterNamed("chargeCalculationType"));
final Integer chargePaymentMode = command.integerValueOfParameterNamed("chargePaymentMode");
// Payment mode is optional; absent means null rather than a default mode.
final ChargePaymentMode paymentMode = chargePaymentMode == null ? null : ChargePaymentMode.fromInt(chargePaymentMode);
final boolean penalty = command.booleanPrimitiveValueOfParameterNamed("penalty");
final boolean active = command.booleanPrimitiveValueOfParameterNamed("active");
final MonthDay feeOnMonthDay = command.extractMonthDayNamed("feeOnMonthDay");
final Integer feeInterval = command.integerValueOfParameterNamed("feeInterval");
final BigDecimal minCap = command.bigDecimalValueOfParameterNamed("minCap");
final BigDecimal maxCap = command.bigDecimalValueOfParameterNamed("maxCap");
final Integer feeFrequency = command.integerValueOfParameterNamed("feeFrequency");
// Direct final initialization (previously "boolean x = false; x = ..." — redundant).
final boolean enableFreeWithdrawalCharge = command.booleanPrimitiveValueOfParameterNamed("enableFreeWithdrawalCharge");
final boolean enablePaymentType = command.booleanPrimitiveValueOfParameterNamed("enablePaymentType");
Integer freeWithdrawalFrequency = null;
Integer restartCountFrequency = null;
PeriodFrequencyType countFrequencyType = null;
if (enableFreeWithdrawalCharge) {
freeWithdrawalFrequency = command.integerValueOfParameterNamed("freeWithdrawalFrequency");
restartCountFrequency = command.integerValueOfParameterNamed("restartCountFrequency");
countFrequencyType = PeriodFrequencyType.fromInt(command.integerValueOfParameterNamed("countFrequencyType"));
}
return new Charge(name, amount, currencyCode, chargeAppliesTo, chargeTimeType, chargeCalculationType, penalty, active, paymentMode,
feeOnMonthDay, feeInterval, minCap, maxCap, feeFrequency, enableFreeWithdrawalCharge, freeWithdrawalFrequency,
restartCountFrequency, countFrequencyType, account, taxGroup, enablePaymentType, paymentType);
}
protected Charge() {}
/**
 * Full constructor used by {@link #fromJson}. Assigns all fields and then
 * validates the combination of charge type / time / calculation, throwing
 * {@link PlatformApiDataValidationException} (collected errors) or the
 * specific charge exceptions for loan-charge rule violations.
 */
private Charge(final String name, final BigDecimal amount, final String currencyCode, final ChargeAppliesTo chargeAppliesTo,
final ChargeTimeType chargeTime, final ChargeCalculationType chargeCalculationType, final boolean penalty, final boolean active,
final ChargePaymentMode paymentMode, final MonthDay feeOnMonthDay, final Integer feeInterval, final BigDecimal minCap,
final BigDecimal maxCap, final Integer feeFrequency, final boolean enableFreeWithdrawalCharge,
final Integer freeWithdrawalFrequency, final Integer restartFrequency, final PeriodFrequencyType restartFrequencyEnum,
final GLAccount account, final TaxGroup taxGroup, final boolean enablePaymentType, final PaymentType paymentType) {
this.name = name;
this.amount = amount;
this.currencyCode = currencyCode;
// Enums are persisted as their integer values.
this.chargeAppliesTo = chargeAppliesTo.getValue();
this.chargeTimeType = chargeTime.getValue();
this.chargeCalculation = chargeCalculationType.getValue();
this.penalty = penalty;
this.active = active;
this.account = account;
this.taxGroup = taxGroup;
this.chargePaymentMode = paymentMode == null ? null : paymentMode.getValue();
final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("charges");
// Monthly/annual fees split the MonthDay into its persisted day+month parts.
// NOTE(review): assumes feeOnMonthDay is non-null for monthly/annual charges —
// validation of that presumably happens in the write service; confirm.
if (isMonthlyFee() || isAnnualFee()) {
this.feeOnMonth = feeOnMonthDay.getMonthValue();
this.feeOnDay = feeOnMonthDay.getDayOfMonth();
}
this.feeInterval = feeInterval;
this.feeFrequency = feeFrequency;
if (isSavingsCharge()) {
// TODO vishwas, this validation seems unnecessary as identical
// validation is performed in the write service
if (!isAllowedSavingsChargeTime()) {
baseDataValidator.reset().parameter("chargeTimeType").value(this.chargeTimeType)
.failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.time.for.savings");
}
// TODO vishwas, this validation seems unnecessary as identical
// validation is performed in the writeservice
if (!isAllowedSavingsChargeCalculationType()) {
baseDataValidator.reset().parameter("chargeCalculationType").value(this.chargeCalculation)
.failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.calculation.type.for.savings");
}
// Percentage-based savings charges are only valid for withdrawal-fee or
// no-activity-fee charge times.
if (!(ChargeTimeType.fromInt(getChargeTimeType()).isWithdrawalFee()
|| ChargeTimeType.fromInt(getChargeTimeType()).isSavingsNoActivityFee())
&& ChargeCalculationType.fromInt(getChargeCalculation()).isPercentageOfAmount()) {
baseDataValidator.reset().parameter("chargeCalculationType").value(this.chargeCalculation)
.failWithCodeNoParameterAddedToErrorCode(
"savings.charge.calculation.type.percentage.allowed.only.for.withdrawal.or.NoActivity");
}
// Free-withdrawal configuration is persisted only when enabled.
if (enableFreeWithdrawalCharge) {
this.enableFreeWithdrawal = true;
this.freeWithdrawalFrequency = freeWithdrawalFrequency;
this.restartFrequency = restartFrequency;
this.restartFrequencyEnum = restartFrequencyEnum.getValue();
}
// Payment-type flag is only persisted when an actual PaymentType is supplied.
if (enablePaymentType) {
if (paymentType != null) {
this.enablePaymentType = true;
this.paymentType = paymentType;
}
}
} else if (isLoanCharge()) {
// A disbursement-time charge cannot be a penalty ...
if (penalty && (chargeTime.isTimeOfDisbursement() || chargeTime.isTrancheDisbursement())) {
throw new ChargeDueAtDisbursementCannotBePenaltyException(name);
}
// ... and an overdue-installment charge must be one.
if (!penalty && chargeTime.isOverdueInstallment()) {
throw new ChargeMustBePenaltyException(name);
}
// TODO vishwas, this validation seems unnecessary as identical
// validation is performed in the write service
if (!isAllowedLoanChargeTime()) {
baseDataValidator.reset().parameter("chargeTimeType").value(this.chargeTimeType)
.failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.time.for.loan");
}
}
// Caps only apply to percentage-of-disbursement/approved-amount calculations.
if (isPercentageOfDisbursementAmount() || isPercentageOfApprovedAmount()) {
this.minCap = minCap;
this.maxCap = maxCap;
}
if (!dataValidationErrors.isEmpty()) {
throw new PlatformApiDataValidationException(dataValidationErrors);
}
}
/** The charge's display name (unique while the charge is not soft-deleted). */
public String getName() {
    return this.name;
}
/** The configured charge amount (flat value or percentage, per {@code chargeCalculation}). */
public BigDecimal getAmount() {
    return this.amount;
}
/** ISO currency code this charge is denominated in. */
public String getCurrencyCode() {
    return this.currencyCode;
}
/** Raw {@link ChargeTimeType} enum ordinal value stored for this charge. */
public Integer getChargeTimeType() {
    return this.chargeTimeType;
}
/** Raw {@link ChargeCalculationType} enum value stored for this charge. */
public Integer getChargeCalculation() {
    return this.chargeCalculation;
}
/** Whether the charge is currently active (selectable for new products/accounts). */
public boolean isActive() {
    return this.active;
}
/** Whether the charge is flagged as a penalty. */
public boolean isPenalty() {
    return this.penalty;
}
/** Whether the charge has been soft-deleted (see {@link #delete()}). */
public boolean isDeleted() {
    return this.deleted;
}
/** True when this charge is configured to apply to loans. */
public boolean isLoanCharge() {
    final ChargeAppliesTo appliesTo = ChargeAppliesTo.fromInt(this.chargeAppliesTo);
    return appliesTo.isLoanCharge();
}
/** True when the stored charge-time value is valid for a loan charge. */
public boolean isAllowedLoanChargeTime() {
    final ChargeTimeType timeType = ChargeTimeType.fromInt(this.chargeTimeType);
    return timeType.isAllowedLoanChargeTime();
}
/** True when the stored charge-time value is valid for a client charge. */
public boolean isAllowedClientChargeTime() {
    final ChargeTimeType timeType = ChargeTimeType.fromInt(this.chargeTimeType);
    return timeType.isAllowedClientChargeTime();
}
/** True when this charge is configured to apply to savings accounts. */
public boolean isSavingsCharge() {
    final ChargeAppliesTo appliesTo = ChargeAppliesTo.fromInt(this.chargeAppliesTo);
    return appliesTo.isSavingsCharge();
}
/** True when this charge is configured to apply directly to clients. */
public boolean isClientCharge() {
    final ChargeAppliesTo appliesTo = ChargeAppliesTo.fromInt(this.chargeAppliesTo);
    return appliesTo.isClientCharge();
}
/** True when the stored charge-time value is valid for a savings charge. */
public boolean isAllowedSavingsChargeTime() {
    final ChargeTimeType timeType = ChargeTimeType.fromInt(this.chargeTimeType);
    return timeType.isAllowedSavingsChargeTime();
}
/** True when the stored calculation type is valid for a savings charge. */
public boolean isAllowedSavingsChargeCalculationType() {
    final ChargeCalculationType calculationType = ChargeCalculationType.fromInt(this.chargeCalculation);
    return calculationType.isAllowedSavingsChargeCalculationType();
}
/** True when the stored calculation type is valid for a client charge. */
public boolean isAllowedClientChargeCalculationType() {
    final ChargeCalculationType calculationType = ChargeCalculationType.fromInt(this.chargeCalculation);
    return calculationType.isAllowedClientChargeCalculationType();
}
/**
 * True when the calculation type is the generic percent-of-amount kind.
 * NOTE(review): the method name says "approved amount" but it delegates to the
 * generic {@code isPercentageOfAmount()} check — presumably the loan-approved
 * amount is what percent-of-amount means in this context; confirm against callers.
 */
public boolean isPercentageOfApprovedAmount() {
    final ChargeCalculationType calculationType = ChargeCalculationType.fromInt(this.chargeCalculation);
    return calculationType.isPercentageOfAmount();
}
/** True when the calculation type is percentage-of-disbursement-amount. */
public boolean isPercentageOfDisbursementAmount() {
    final ChargeCalculationType calculationType = ChargeCalculationType.fromInt(this.chargeCalculation);
    return calculationType.isPercentageOfDisbursementAmount();
}
/** Lower bound applied to a percentage-based charge; may be {@code null}. */
public BigDecimal getMinCap() {
    return this.minCap;
}
/** Upper bound applied to a percentage-based charge; may be {@code null}. */
public BigDecimal getMaxCap() {
    return this.maxCap;
}
/** Whether a number of free (uncharged) withdrawals is enabled for this charge. */
public boolean isEnableFreeWithdrawal() {
    return this.enableFreeWithdrawal;
}
/** Whether this charge is restricted to a specific payment type. */
public boolean isEnablePaymentType() {
    return this.enablePaymentType;
}
/** Number of free withdrawals allowed per restart period; may be {@code null}. */
public Integer getFrequencyFreeWithdrawalCharge() {
    return this.freeWithdrawalFrequency;
}
/** Interval count after which the free-withdrawal counter restarts; may be {@code null}. */
public Integer getRestartFrequency() {
    return this.restartFrequency;
}
/** Raw enum value for the restart-frequency period type; may be {@code null}. */
public Integer getRestartFrequencyEnum() {
    return this.restartFrequencyEnum;
}
/** The payment type this charge is restricted to, or {@code null} when unrestricted. */
public PaymentType getPaymentType() {
    return this.paymentType;
}
/** Associates (or clears, with {@code null}) the payment type for this charge. */
public void setPaymentType(PaymentType paymentType) {
    this.paymentType = paymentType;
}
/** @return the linked payment type's id, or {@code null} when no payment type is linked. */
private Long getPaymentTypeId() {
    return this.paymentType == null ? null : this.paymentType.getId();
}
/**
 * Applies the changes carried by {@code command} to this charge definition and
 * returns a map of parameter name to new value for everything that actually
 * changed (consumed downstream for auditing/event payloads).
 *
 * <p>Validation failures are accumulated and thrown together as a single
 * {@link PlatformApiDataValidationException}. {@code chargeAppliesTo} may not
 * be changed at all. The glAccountId / taxGroupId / paymentTypeId changes are
 * only recorded in the change map here; the owning write service resolves and
 * attaches the actual entities.
 *
 * @param command the JSON command holding candidate new values
 * @return map of changed parameter names to their new values (possibly empty)
 * @throws ChargeParameterUpdateNotSupportedException   if chargeAppliesTo changes
 * @throws ChargeDueAtDisbursementCannotBePenaltyException if a penalty is due at disbursement
 * @throws ChargeMustBePenaltyException                 if an overdue charge is not a penalty
 * @throws PlatformApiDataValidationException           if any accumulated validation failed
 */
public Map<String, Object> update(final JsonCommand command) {
    final Map<String, Object> actualChanges = new LinkedHashMap<>(7);
    final Locale locale = command.extractLocale();
    final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
    final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors).resource("charges");

    final String nameParamName = "name";
    if (command.isChangeInStringParameterNamed(nameParamName, this.name)) {
        final String newValue = command.stringValueOfParameterNamed(nameParamName);
        actualChanges.put(nameParamName, newValue);
        this.name = newValue;
    }
    final String currencyCodeParamName = "currencyCode";
    if (command.isChangeInStringParameterNamed(currencyCodeParamName, this.currencyCode)) {
        final String newValue = command.stringValueOfParameterNamed(currencyCodeParamName);
        actualChanges.put(currencyCodeParamName, newValue);
        this.currencyCode = newValue;
    }
    final String amountParamName = "amount";
    if (command.isChangeInBigDecimalParameterNamed(amountParamName, this.amount)) {
        final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(amountParamName, locale);
        actualChanges.put(amountParamName, newValue);
        actualChanges.put("locale", locale.getLanguage());
        this.amount = newValue;
    }
    final String chargeTimeParamName = "chargeTimeType";
    if (command.isChangeInIntegerParameterNamed(chargeTimeParamName, this.chargeTimeType)) {
        final Integer newValue = command.integerValueOfParameterNamed(chargeTimeParamName);
        actualChanges.put(chargeTimeParamName, newValue);
        actualChanges.put("locale", locale.getLanguage());
        this.chargeTimeType = ChargeTimeType.fromInt(newValue).getValue();
        if (isSavingsCharge()) {
            if (!isAllowedSavingsChargeTime()) {
                baseDataValidator.reset().parameter("chargeTimeType").value(this.chargeTimeType)
                        .failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.time.for.savings");
            }
            // if charge time is changed to monthly then validate for feeOnMonthDay and feeInterval
            if (isMonthlyFee()) {
                final MonthDay monthDay = command.extractMonthDayNamed("feeOnMonthDay");
                baseDataValidator.reset().parameter("feeOnMonthDay").value(monthDay).notNull();
                final Integer feeInterval = command.integerValueOfParameterNamed("feeInterval");
                baseDataValidator.reset().parameter("feeInterval").value(feeInterval).notNull().inMinMaxRange(1, 12);
            }
        } else if (isLoanCharge()) {
            if (!isAllowedLoanChargeTime()) {
                baseDataValidator.reset().parameter("chargeTimeType").value(this.chargeTimeType)
                        .failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.time.for.loan");
            }
        } else if (isClientCharge()) {
            // FIX: this branch previously called isAllowedLoanChargeTime(), so a client
            // charge's time type was validated against the LOAN rule set while reporting
            // a client error code. Validate against the client rule set instead.
            if (!isAllowedClientChargeTime()) {
                baseDataValidator.reset().parameter("chargeTimeType").value(this.chargeTimeType)
                        .failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.time.for.client");
            }
        }
    }
    final String freeWithdrawalFrequencyParamName = "freeWithdrawalFrequency";
    if (command.isChangeInIntegerParameterNamed(freeWithdrawalFrequencyParamName, this.freeWithdrawalFrequency)) {
        final Integer enableFreeWithdrawalChargeNewValue = command.integerValueOfParameterNamed(freeWithdrawalFrequencyParamName);
        actualChanges.put(freeWithdrawalFrequencyParamName, enableFreeWithdrawalChargeNewValue);
        this.freeWithdrawalFrequency = enableFreeWithdrawalChargeNewValue;
    }
    final String restartCountFrequencyParamName = "restartCountFrequency";
    if (command.isChangeInIntegerParameterNamed(restartCountFrequencyParamName, this.restartFrequency)) {
        final Integer restartCountFrequencyNewValue = command.integerValueOfParameterNamed(restartCountFrequencyParamName);
        actualChanges.put(restartCountFrequencyParamName, restartCountFrequencyNewValue);
        this.restartFrequency = restartCountFrequencyNewValue;
    }
    final String countFrequencyTypeParamName = "countFrequencyType";
    if (command.isChangeInIntegerParameterNamed(countFrequencyTypeParamName, this.restartFrequencyEnum)) {
        final Integer countFrequencyTypeNewValue = command.integerValueOfParameterNamed(countFrequencyTypeParamName);
        actualChanges.put(countFrequencyTypeParamName, countFrequencyTypeNewValue);
        this.restartFrequencyEnum = ChargeTimeType.fromInt(countFrequencyTypeNewValue).getValue();
    }
    final String enableFreeWithdrawalChargeParamName = "enableFreeWithdrawalCharge";
    if (command.isChangeInBooleanParameterNamed(enableFreeWithdrawalChargeParamName, this.enableFreeWithdrawal)) {
        final boolean newValue = command.booleanPrimitiveValueOfParameterNamed(enableFreeWithdrawalChargeParamName);
        actualChanges.put(enableFreeWithdrawalChargeParamName, newValue);
        this.enableFreeWithdrawal = newValue;
    }
    final String enablePaymentTypeParamName = "enablePaymentType";
    if (command.isChangeInBooleanParameterNamed(enablePaymentTypeParamName, this.enablePaymentType)) {
        final boolean newValue = command.booleanPrimitiveValueOfParameterNamed(enablePaymentTypeParamName);
        actualChanges.put(enablePaymentTypeParamName, newValue);
        this.enablePaymentType = newValue;
    }
    // Only the change is recorded here; the write service resolves the PaymentType entity.
    final String paymentTypeParamName = "paymentTypeId";
    if (command.isChangeInLongParameterNamed(paymentTypeParamName, getPaymentTypeId())) {
        final Long newValue = command.longValueOfParameterNamed(paymentTypeParamName);
        actualChanges.put(paymentTypeParamName, newValue);
    }
    final String chargeAppliesToParamName = "chargeAppliesTo";
    if (command.isChangeInIntegerParameterNamed(chargeAppliesToParamName, this.chargeAppliesTo)) {
        // AA: Do not allow to change chargeAppliesTo.
        final String errorMessage = "Update of Charge applies to is not supported";
        throw new ChargeParameterUpdateNotSupportedException("charge.applies.to", errorMessage);
    }
    final String chargeCalculationParamName = "chargeCalculationType";
    if (command.isChangeInIntegerParameterNamed(chargeCalculationParamName, this.chargeCalculation)) {
        final Integer newValue = command.integerValueOfParameterNamed(chargeCalculationParamName);
        actualChanges.put(chargeCalculationParamName, newValue);
        actualChanges.put("locale", locale.getLanguage());
        this.chargeCalculation = ChargeCalculationType.fromInt(newValue).getValue();
        if (isSavingsCharge()) {
            if (!isAllowedSavingsChargeCalculationType()) {
                baseDataValidator.reset().parameter("chargeCalculationType").value(this.chargeCalculation)
                        .failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.calculation.type.for.savings");
            }
            // Percentage-of-amount is only valid for withdrawal / no-activity savings charges.
            if (!(ChargeTimeType.fromInt(getChargeTimeType()).isWithdrawalFee()
                    || ChargeTimeType.fromInt(getChargeTimeType()).isSavingsNoActivityFee())
                    && ChargeCalculationType.fromInt(getChargeCalculation()).isPercentageOfAmount()) {
                baseDataValidator.reset().parameter("chargeCalculationType").value(this.chargeCalculation)
                        .failWithCodeNoParameterAddedToErrorCode(
                                "charge.calculation.type.percentage.allowed.only.for.withdrawal.or.noactivity");
            }
        } else if (isClientCharge()) {
            if (!isAllowedClientChargeCalculationType()) {
                baseDataValidator.reset().parameter("chargeCalculationType").value(this.chargeCalculation)
                        .failWithCodeNoParameterAddedToErrorCode("not.allowed.charge.calculation.type.for.client");
            }
        }
    }
    // validate only for loan charge
    if (isLoanCharge()) {
        final String paymentModeParamName = "chargePaymentMode";
        if (command.isChangeInIntegerParameterNamed(paymentModeParamName, this.chargePaymentMode)) {
            final Integer newValue = command.integerValueOfParameterNamed(paymentModeParamName);
            actualChanges.put(paymentModeParamName, newValue);
            actualChanges.put("locale", locale.getLanguage());
            this.chargePaymentMode = ChargePaymentMode.fromInt(newValue).getValue();
        }
    }
    if (command.hasParameter("feeOnMonthDay")) {
        final MonthDay monthDay = command.extractMonthDayNamed("feeOnMonthDay");
        final String actualValueEntered = command.stringValueOfParameterNamed("feeOnMonthDay");
        final Integer dayOfMonthValue = monthDay.getDayOfMonth();
        // FIX: use Objects.equals — this.feeOnDay/this.feeOnMonth are null for charges
        // that were not created as monthly/annual fees, and the previous
        // this.feeOnDay.equals(...) call threw an NPE in that case.
        if (!Objects.equals(this.feeOnDay, dayOfMonthValue)) {
            actualChanges.put("feeOnMonthDay", actualValueEntered);
            actualChanges.put("locale", locale.getLanguage());
            this.feeOnDay = dayOfMonthValue;
        }
        final Integer monthOfYear = monthDay.getMonthValue();
        if (!Objects.equals(this.feeOnMonth, monthOfYear)) {
            actualChanges.put("feeOnMonthDay", actualValueEntered);
            actualChanges.put("locale", locale.getLanguage());
            this.feeOnMonth = monthOfYear;
        }
    }
    final String feeInterval = "feeInterval";
    if (command.isChangeInIntegerParameterNamed(feeInterval, this.feeInterval)) {
        final Integer newValue = command.integerValueOfParameterNamed(feeInterval);
        actualChanges.put(feeInterval, newValue);
        actualChanges.put("locale", locale.getLanguage());
        this.feeInterval = newValue;
    }
    final String feeFrequency = "feeFrequency";
    if (command.isChangeInIntegerParameterNamed(feeFrequency, this.feeFrequency)) {
        final Integer newValue = command.integerValueOfParameterNamed(feeFrequency);
        actualChanges.put(feeFrequency, newValue);
        actualChanges.put("locale", locale.getLanguage());
        this.feeFrequency = newValue;
    }
    // A recurring fee frequency requires an interval.
    if (this.feeFrequency != null) {
        baseDataValidator.reset().parameter("feeInterval").value(this.feeInterval).notNull();
    }
    final String penaltyParamName = "penalty";
    if (command.isChangeInBooleanParameterNamed(penaltyParamName, this.penalty)) {
        final boolean newValue = command.booleanPrimitiveValueOfParameterNamed(penaltyParamName);
        actualChanges.put(penaltyParamName, newValue);
        this.penalty = newValue;
    }
    final String activeParamName = "active";
    if (command.isChangeInBooleanParameterNamed(activeParamName, this.active)) {
        final boolean newValue = command.booleanPrimitiveValueOfParameterNamed(activeParamName);
        actualChanges.put(activeParamName, newValue);
        this.active = newValue;
    }
    // allow min and max cap to be only added to PERCENT_OF_AMOUNT for now
    if (isPercentageOfApprovedAmount()) {
        final String minCapParamName = "minCap";
        if (command.isChangeInBigDecimalParameterNamed(minCapParamName, this.minCap)) {
            final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(minCapParamName);
            actualChanges.put(minCapParamName, newValue);
            actualChanges.put("locale", locale.getLanguage());
            this.minCap = newValue;
        }
        final String maxCapParamName = "maxCap";
        if (command.isChangeInBigDecimalParameterNamed(maxCapParamName, this.maxCap)) {
            final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(maxCapParamName);
            actualChanges.put(maxCapParamName, newValue);
            actualChanges.put("locale", locale.getLanguage());
            this.maxCap = newValue;
        }
    }
    // Cross-field invariants checked on the final state.
    if (this.penalty && ChargeTimeType.fromInt(this.chargeTimeType).isTimeOfDisbursement()) {
        throw new ChargeDueAtDisbursementCannotBePenaltyException(this.name);
    }
    if (!this.penalty && ChargeTimeType.fromInt(this.chargeTimeType).isOverdueInstallment()) {
        throw new ChargeMustBePenaltyException(this.name);
    }
    // Only the change is recorded; the write service attaches the GL account entity.
    if (command.isChangeInLongParameterNamed(ChargesApiConstants.glAccountIdParamName, getIncomeAccountId())) {
        final Long newValue = command.longValueOfParameterNamed(ChargesApiConstants.glAccountIdParamName);
        actualChanges.put(ChargesApiConstants.glAccountIdParamName, newValue);
    }
    if (command.isChangeInLongParameterNamed(ChargesApiConstants.taxGroupIdParamName, getTaxGroupId())) {
        final Long newValue = command.longValueOfParameterNamed(ChargesApiConstants.taxGroupIdParamName);
        actualChanges.put(ChargesApiConstants.taxGroupIdParamName, newValue);
        // An already-assigned tax group may not be modified.
        if (taxGroup != null) {
            baseDataValidator.reset().parameter(ChargesApiConstants.taxGroupIdParamName).failWithCode("modification.not.supported");
        }
    }
    if (!dataValidationErrors.isEmpty()) {
        throw new PlatformApiDataValidationException(dataValidationErrors);
    }
    return actualChanges;
}
/**
 * Soft delete: flags the charge as deleted so it no longer appears in
 * query/report results, and prefixes the (unique) name with this record's id
 * so the original name can be reused by a new charge.
 */
public void delete() {
    this.name = getId() + "_" + this.name;
    this.deleted = true;
}
/**
 * Maps this entity onto its read-model {@link ChargeData} representation,
 * translating raw enum values to {@link EnumOptionData} and optional
 * associations (GL account, tax group, payment type) to their data objects
 * when present.
 */
public ChargeData toData() {
    final EnumOptionData timeType = ChargeEnumerations.chargeTimeType(this.chargeTimeType);
    final EnumOptionData appliesTo = ChargeEnumerations.chargeAppliesTo(this.chargeAppliesTo);
    final EnumOptionData calculationType = ChargeEnumerations.chargeCalculationType(this.chargeCalculation);
    final EnumOptionData paymentMode = ChargeEnumerations.chargePaymentMode(this.chargePaymentMode);
    final EnumOptionData feeFrequencyType = this.feeFrequency == null ? null
            : ChargeEnumerations.feeFrequencyType(this.feeFrequency);
    final GLAccountData accountData = this.account == null ? null
            : new GLAccountData().setId(this.account.getId()).setName(this.account.getName()).setGlCode(this.account.getGlCode());
    final TaxGroupData taxGroupData = this.taxGroup == null ? null
            : TaxGroupData.lookup(this.taxGroup.getId(), this.taxGroup.getName());
    final PaymentTypeData paymentTypeData = this.paymentType == null ? null
            : PaymentTypeData.instance(this.paymentType.getId(), this.paymentType.getName());
    final CurrencyData currency = new CurrencyData(this.currencyCode, null, 0, 0, null, null);
    return ChargeData.builder().id(getId()).name(this.name).amount(this.amount).currency(currency).chargeTimeType(timeType)
            .chargeAppliesTo(appliesTo).chargeCalculationType(calculationType).chargePaymentMode(paymentMode)
            .feeOnMonthDay(getFeeOnMonthDay()).feeInterval(this.feeInterval).penalty(this.penalty).active(this.active)
            .freeWithdrawal(this.enableFreeWithdrawal).freeWithdrawalChargeFrequency(this.freeWithdrawalFrequency)
            .restartFrequency(this.restartFrequency).restartFrequencyEnum(this.restartFrequencyEnum)
            .isPaymentType(this.enablePaymentType).paymentTypeOptions(paymentTypeData).minCap(this.minCap).maxCap(this.maxCap)
            .feeFrequency(feeFrequencyType).incomeOrLiabilityAccount(accountData).taxGroup(taxGroupData).build();
}
/** Raw {@link ChargePaymentMode} enum value stored for this charge. */
public Integer getChargePaymentMode() {
    return this.chargePaymentMode;
}
/** Interval between recurring fee applications; may be {@code null}. */
public Integer getFeeInterval() {
    return this.feeInterval;
}
/** True when the charge-time type is a monthly fee. */
public boolean isMonthlyFee() {
    final ChargeTimeType timeType = ChargeTimeType.fromInt(this.chargeTimeType);
    return timeType.isMonthlyFee();
}
/** True when the charge-time type is an annual fee. */
public boolean isAnnualFee() {
    final ChargeTimeType timeType = ChargeTimeType.fromInt(this.chargeTimeType);
    return timeType.isAnnualFee();
}
/** True when the charge-time type is overdue-installment. */
public boolean isOverdueInstallment() {
    final ChargeTimeType timeType = ChargeTimeType.fromInt(this.chargeTimeType);
    return timeType.isOverdueInstallment();
}
/**
 * The configured fee month/day as a {@link MonthDay}, or {@code null} when the
 * charge has no fee day/month configured (i.e. it is not a monthly/annual fee).
 *
 * Builds the value directly with {@link MonthDay#of(int, int)} instead of the
 * previous {@code MonthDay.now(tenantZone).withMonth(...).withDayOfMonth(...)}
 * chain: the result is identical (withDayOfMonth validated the day against the
 * already-set month, exactly as MonthDay.of does), but no longer depends on the
 * tenant clock/timezone for what is purely stored state.
 */
public MonthDay getFeeOnMonthDay() {
    if (this.feeOnDay == null || this.feeOnMonth == null) {
        return null;
    }
    return MonthDay.of(this.feeOnMonth, this.feeOnDay);
}
/** Alias for {@link #getFeeInterval()} kept for existing callers. */
public Integer feeInterval() {
    return this.feeInterval;
}
/** Raw fee-frequency enum value; {@code null} when the fee does not recur. */
public Integer feeFrequency() {
    return this.feeFrequency;
}
/** Income/liability GL account linked to this charge; may be {@code null}. */
public GLAccount getAccount() {
    return this.account;
}
/** Links (or clears, with {@code null}) the GL account for this charge. */
public void setAccount(GLAccount account) {
    this.account = account;
}
/** @return the linked GL account's id, or {@code null} when no account is linked. */
public Long getIncomeAccountId() {
    return this.account == null ? null : this.account.getId();
}
/** @return the linked tax group's id, or {@code null} when no tax group is linked. */
private Long getTaxGroupId() {
    return this.taxGroup == null ? null : this.taxGroup.getId();
}
/**
 * True when the charge is due at (tranche) disbursement.
 *
 * Uses {@code ==} for the enum comparison (idiomatic and equivalent to the
 * previous {@code equals()} calls for enum constants) and resolves
 * {@code ChargeTimeType.fromInt} once instead of twice.
 */
public boolean isDisbursementCharge() {
    final ChargeTimeType timeType = ChargeTimeType.fromInt(this.chargeTimeType);
    return timeType == ChargeTimeType.DISBURSEMENT || timeType == ChargeTimeType.TRANCHE_DISBURSEMENT;
}
/** Tax group applied to this charge; may be {@code null}. */
public TaxGroup getTaxGroup() {
    return this.taxGroup;
}
/** Associates (or clears, with {@code null}) the tax group for this charge. */
public void setTaxGroup(TaxGroup taxGroup) {
    this.taxGroup = taxGroup;
}
/**
 * Value equality over the charge's definitional fields (name, amount,
 * currency, enum values, fee configuration, caps, account and tax group).
 * The field list here MUST stay in sync with {@link #hashCode()}.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof Charge)) {
        return false;
    }
    Charge other = (Charge) o;
    return Objects.equals(name, other.name) && Objects.equals(amount, other.amount) && Objects.equals(currencyCode, other.currencyCode)
    && Objects.equals(chargeAppliesTo, other.chargeAppliesTo) && Objects.equals(chargeTimeType, other.chargeTimeType)
    && Objects.equals(chargeCalculation, other.chargeCalculation) && Objects.equals(chargePaymentMode, other.chargePaymentMode)
    && Objects.equals(feeOnDay, other.feeOnDay) && Objects.equals(feeInterval, other.feeInterval)
    && Objects.equals(feeOnMonth, other.feeOnMonth) && penalty == other.penalty && active == other.active
    && deleted == other.deleted && Objects.equals(minCap, other.minCap) && Objects.equals(maxCap, other.maxCap)
    && Objects.equals(feeFrequency, other.feeFrequency) && Objects.equals(account, other.account)
    && Objects.equals(taxGroup, other.taxGroup);
}
/** Hashes exactly the fields compared in {@link #equals(Object)} — keep in sync. */
@Override
public int hashCode() {
    return Objects.hash(name, amount, currencyCode, chargeAppliesTo, chargeTimeType, chargeCalculation, chargePaymentMode, feeOnDay,
    feeInterval, feeOnMonth, penalty, active, deleted, minCap, maxCap, feeFrequency, account, taxGroup);
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Spec for Comet metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.CometSpec}
*/
public final class CometSpec extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.CometSpec)
CometSpecOrBuilder {
private static final long serialVersionUID = 0L;
// NOTE(review): protoc-generated file — functional changes belong in the .proto source.
// Use CometSpec.newBuilder() to construct.
private CometSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default-instance constructor: unspecified version, empty language tags.
private CometSpec() {
version_ = 0;
sourceLanguage_ = "";
targetLanguage_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CometSpec();
}
// Message descriptor for CometSpec, resolved from the generated file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CometSpec_descriptor;
}
// Field-accessor table used by the runtime for reflective field access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_CometSpec_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.CometSpec.class,
com.google.cloud.aiplatform.v1beta1.CometSpec.Builder.class);
}
// NOTE(review): protoc-generated enum — do not hand-edit; add values in the .proto.
// UNRECOGNIZED(-1) represents wire values not known to this generated code
// (note the proto skips value 1, so only 0 and 2 are mapped).
/**
*
*
* <pre>
* Comet version options.
* </pre>
*
* Protobuf enum {@code google.cloud.aiplatform.v1beta1.CometSpec.CometVersion}
*/
public enum CometVersion implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Comet version unspecified.
* </pre>
*
* <code>COMET_VERSION_UNSPECIFIED = 0;</code>
*/
COMET_VERSION_UNSPECIFIED(0),
/**
*
*
* <pre>
* Comet 22 for translation + source + reference
* (source-reference-combined).
* </pre>
*
* <code>COMET_22_SRC_REF = 2;</code>
*/
COMET_22_SRC_REF(2),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Comet version unspecified.
* </pre>
*
* <code>COMET_VERSION_UNSPECIFIED = 0;</code>
*/
public static final int COMET_VERSION_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Comet 22 for translation + source + reference
* (source-reference-combined).
* </pre>
*
* <code>COMET_22_SRC_REF = 2;</code>
*/
public static final int COMET_22_SRC_REF_VALUE = 2;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static CometVersion valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static CometVersion forNumber(int value) {
switch (value) {
case 0:
return COMET_VERSION_UNSPECIFIED;
case 2:
return COMET_22_SRC_REF;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<CometVersion> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<CometVersion> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<CometVersion>() {
public CometVersion findValueByNumber(int number) {
return CometVersion.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.CometSpec.getDescriptor().getEnumTypes().get(0);
}
private static final CometVersion[] VALUES = values();
public static CometVersion valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private CometVersion(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1beta1.CometSpec.CometVersion)
}
// bitField0_ tracks presence of optional fields; bit 0x1 = version (see hasVersion()).
private int bitField0_;
public static final int VERSION_FIELD_NUMBER = 1;
private int version_ = 0;
// Presence check backed by bit 0x1 of bitField0_.
/**
*
*
* <pre>
* Required. Which version to use for evaluation.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
// Raw wire value, preserved even for enum numbers unknown to this binary.
/**
*
*
* <pre>
* Required. Which version to use for evaluation.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The enum numeric value on the wire for version.
*/
@java.lang.Override
public int getVersionValue() {
return version_;
}
// Maps the stored wire value to the enum; unknown values surface as UNRECOGNIZED.
/**
*
*
* <pre>
* Required. Which version to use for evaluation.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The version.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion getVersion() {
com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion result =
com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion.forNumber(version_);
return result == null
? com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion.UNRECOGNIZED
: result;
}
// Holds either a String or a ByteString; lazily converted by the accessors below.
public static final int SOURCE_LANGUAGE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object sourceLanguage_ = "";
// Converts a cached ByteString to String on first access and memoizes it.
/**
*
*
* <pre>
* Optional. Source language in BCP-47 format.
* </pre>
*
* <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The sourceLanguage.
*/
@java.lang.Override
public java.lang.String getSourceLanguage() {
java.lang.Object ref = sourceLanguage_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
sourceLanguage_ = s;
return s;
}
}
// Converts a cached String to ByteString on first access and memoizes it.
/**
*
*
* <pre>
* Optional. Source language in BCP-47 format.
* </pre>
*
* <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for sourceLanguage.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSourceLanguageBytes() {
java.lang.Object ref = sourceLanguage_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
sourceLanguage_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Holds either a String or a ByteString; lazily converted by the accessors below.
public static final int TARGET_LANGUAGE_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object targetLanguage_ = "";
// Converts a cached ByteString to String on first access and memoizes it.
/**
*
*
* <pre>
* Optional. Target language in BCP-47 format. Covers both prediction and
* reference.
* </pre>
*
* <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The targetLanguage.
*/
@java.lang.Override
public java.lang.String getTargetLanguage() {
java.lang.Object ref = targetLanguage_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
targetLanguage_ = s;
return s;
}
}
// Converts a cached String to ByteString on first access and memoizes it.
/**
*
*
* <pre>
* Optional. Target language in BCP-47 format. Covers both prediction and
* reference.
* </pre>
*
* <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for targetLanguage.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTargetLanguageBytes() {
java.lang.Object ref = targetLanguage_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
targetLanguage_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
// Always initialized (the message has no required proto2-style fields); result cached.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set/non-empty fields in field-number order, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeEnum(1, version_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, sourceLanguage_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, targetLanguage_);
}
getUnknownFields().writeTo(output);
}
// Computes the wire size mirroring writeTo(); result memoized in memoizedSize.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, version_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(sourceLanguage_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, sourceLanguage_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetLanguage_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, targetLanguage_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-wise equality including version presence and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.CometSpec)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.CometSpec other =
(com.google.cloud.aiplatform.v1beta1.CometSpec) obj;
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (version_ != other.version_) return false;
}
if (!getSourceLanguage().equals(other.getSourceLanguage())) return false;
if (!getTargetLanguage().equals(other.getTargetLanguage())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash over the same fields as equals(); memoized in memoizedHashCode.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + version_;
}
hash = (37 * hash) + SOURCE_LANGUAGE_FIELD_NUMBER;
hash = (53 * hash) + getSourceLanguage().hashCode();
hash = (37 * hash) + TARGET_LANGUAGE_FIELD_NUMBER;
hash = (53 * hash) + getTargetLanguage().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Generated parser entry point (ByteBuffer).
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.CometSpec parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// ---------------------------------------------------------------------------
// Builder factory methods.
// ---------------------------------------------------------------------------

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Creates an empty builder (backed by the default instance).
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Creates a builder pre-populated with the fields of {@code prototype}.
public static Builder newBuilder(com.google.cloud.aiplatform.v1beta1.CometSpec prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields a fresh empty builder; any other instance is
  // copied into the builder via mergeFrom.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Spec for Comet metric.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1beta1.CometSpec}
 */
// NOTE(review): protoc-generated builder — do not hand-edit; regenerate from
// the .proto definition instead. Field presence is tracked in bitField0_:
// bit 0 = version, bit 1 = source_language, bit 2 = target_language.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.CometSpec)
    com.google.cloud.aiplatform.v1beta1.CometSpecOrBuilder {
  // Descriptor for the CometSpec message type (shared with the message class).
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_CometSpec_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_CometSpec_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.CometSpec.class,
            com.google.cloud.aiplatform.v1beta1.CometSpec.Builder.class);
  }

  // Construct using com.google.cloud.aiplatform.v1beta1.CometSpec.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  // Resets every field to its proto default and clears all presence bits.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    version_ = 0;
    sourceLanguage_ = "";
    targetLanguage_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_CometSpec_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.CometSpec getDefaultInstanceForType() {
    return com.google.cloud.aiplatform.v1beta1.CometSpec.getDefaultInstance();
  }

  // Builds the message, throwing if required invariants are unmet
  // (isInitialized() is always true here, so this never actually throws).
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.CometSpec build() {
    com.google.cloud.aiplatform.v1beta1.CometSpec result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.CometSpec buildPartial() {
    com.google.cloud.aiplatform.v1beta1.CometSpec result =
        new com.google.cloud.aiplatform.v1beta1.CometSpec(this);
    // Only copy fields over when at least one presence bit is set.
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies set fields from this builder into the new message instance.
  // Note only the optional enum (version) propagates its presence bit to the
  // message; the strings rely on their non-empty value for hazzer-less access.
  private void buildPartial0(com.google.cloud.aiplatform.v1beta1.CometSpec result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.version_ = version_;
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.sourceLanguage_ = sourceLanguage_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.targetLanguage_ = targetLanguage_;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  // --- Reflective field mutators; all delegate to GeneratedMessageV3.Builder. ---

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic merge: uses the fast typed path when `other` is a CometSpec,
  // otherwise falls back to the reflective superclass merge.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.aiplatform.v1beta1.CometSpec) {
      return mergeFrom((com.google.cloud.aiplatform.v1beta1.CometSpec) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Typed merge: copies each set (or non-empty, for strings) field from
  // `other` into this builder, following proto3 merge semantics.
  public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.CometSpec other) {
    if (other == com.google.cloud.aiplatform.v1beta1.CometSpec.getDefaultInstance()) return this;
    if (other.hasVersion()) {
      setVersion(other.getVersion());
    }
    if (!other.getSourceLanguage().isEmpty()) {
      sourceLanguage_ = other.sourceLanguage_;
      bitField0_ |= 0x00000002;
      onChanged();
    }
    if (!other.getTargetLanguage().isEmpty()) {
      targetLanguage_ = other.targetLanguage_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // CometSpec has no proto2-required fields, so any state is initialized.
    return true;
  }

  // Wire-format merge: reads tag/value pairs until end of input. The case
  // labels are precomputed tags (field number << 3 | wire type):
  // 8 = field 1 varint, 18 = field 2 length-delimited, 26 = field 3
  // length-delimited. Unknown tags are preserved via parseUnknownField.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              version_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
          case 18:
            {
              sourceLanguage_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          case 26:
            {
              targetLanguage_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Presence bitmask: bit 0 = version, bit 1 = source_language,
  // bit 2 = target_language.
  private int bitField0_;

  // Wire value of the version enum (0 = unspecified default).
  private int version_ = 0;
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the version field is set.
   */
  @java.lang.Override
  public boolean hasVersion() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The enum numeric value on the wire for version.
   */
  @java.lang.Override
  public int getVersionValue() {
    return version_;
  }
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @param value The enum numeric value on the wire for version to set.
   * @return This builder for chaining.
   */
  public Builder setVersionValue(int value) {
    version_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The version.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion getVersion() {
    // Unknown wire values map to UNRECOGNIZED rather than null.
    com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion result =
        com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion.forNumber(version_);
    return result == null
        ? com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion.UNRECOGNIZED
        : result;
  }
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @param value The version to set.
   * @return This builder for chaining.
   */
  public Builder setVersion(com.google.cloud.aiplatform.v1beta1.CometSpec.CometVersion value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000001;
    version_ = value.getNumber();
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Which version to use for evaluation.
   * </pre>
   *
   * <code>
   * optional .google.cloud.aiplatform.v1beta1.CometSpec.CometVersion version = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return This builder for chaining.
   */
  public Builder clearVersion() {
    bitField0_ = (bitField0_ & ~0x00000001);
    version_ = 0;
    onChanged();
    return this;
  }

  // Lazily-decoded string field: holds either a String or a ByteString.
  private java.lang.Object sourceLanguage_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Source language in BCP-47 format.
   * </pre>
   *
   * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The sourceLanguage.
   */
  public java.lang.String getSourceLanguage() {
    java.lang.Object ref = sourceLanguage_;
    if (!(ref instanceof java.lang.String)) {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      sourceLanguage_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Source language in BCP-47 format.
   * </pre>
   *
   * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for sourceLanguage.
   */
  public com.google.protobuf.ByteString getSourceLanguageBytes() {
    java.lang.Object ref = sourceLanguage_;
    if (ref instanceof String) {
      // Encode the cached String once and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      sourceLanguage_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Source language in BCP-47 format.
   * </pre>
   *
   * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param value The sourceLanguage to set.
   * @return This builder for chaining.
   */
  public Builder setSourceLanguage(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    sourceLanguage_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Source language in BCP-47 format.
   * </pre>
   *
   * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearSourceLanguage() {
    sourceLanguage_ = getDefaultInstance().getSourceLanguage();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Source language in BCP-47 format.
   * </pre>
   *
   * <code>string source_language = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param value The bytes for sourceLanguage to set.
   * @return This builder for chaining.
   */
  public Builder setSourceLanguageBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    sourceLanguage_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  // Lazily-decoded string field: holds either a String or a ByteString.
  private java.lang.Object targetLanguage_ = "";
  /**
   *
   *
   * <pre>
   * Optional. Target language in BCP-47 format. Covers both prediction and
   * reference.
   * </pre>
   *
   * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The targetLanguage.
   */
  public java.lang.String getTargetLanguage() {
    java.lang.Object ref = targetLanguage_;
    if (!(ref instanceof java.lang.String)) {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      targetLanguage_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Target language in BCP-47 format. Covers both prediction and
   * reference.
   * </pre>
   *
   * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for targetLanguage.
   */
  public com.google.protobuf.ByteString getTargetLanguageBytes() {
    java.lang.Object ref = targetLanguage_;
    if (ref instanceof String) {
      // Encode the cached String once and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      targetLanguage_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Target language in BCP-47 format. Covers both prediction and
   * reference.
   * </pre>
   *
   * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param value The targetLanguage to set.
   * @return This builder for chaining.
   */
  public Builder setTargetLanguage(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    targetLanguage_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Target language in BCP-47 format. Covers both prediction and
   * reference.
   * </pre>
   *
   * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearTargetLanguage() {
    targetLanguage_ = getDefaultInstance().getTargetLanguage();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Target language in BCP-47 format. Covers both prediction and
   * reference.
   * </pre>
   *
   * <code>string target_language = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @param value The bytes for targetLanguage to set.
   * @return This builder for chaining.
   */
  public Builder setTargetLanguageBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    targetLanguage_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.CometSpec)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.CometSpec)
// Singleton default instance; also serves as the canonical "empty" message.
private static final com.google.cloud.aiplatform.v1beta1.CometSpec DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.CometSpec();
}

public static com.google.cloud.aiplatform.v1beta1.CometSpec getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Shared parser instance backing all the static parseFrom overloads.
private static final com.google.protobuf.Parser<CometSpec> PARSER =
    new com.google.protobuf.AbstractParser<CometSpec>() {
      @java.lang.Override
      public CometSpec parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach the partially-parsed message so callers can inspect it.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<CometSpec> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CometSpec> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.CometSpec getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}