repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/fineract | 36,984 | fineract-savings/src/main/java/org/apache/fineract/portfolio/savings/domain/SavingsAccountCharge.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.savings.domain;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.amountParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.dateFormatParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.dueAsOfDateParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.feeIntervalParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.feeOnMonthDayParamName;
import static org.apache.fineract.portfolio.savings.SavingsApiConstants.localeParamName;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.JoinColumn;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotNull;
import java.math.BigDecimal;
import java.math.MathContext;
import java.time.LocalDate;
import java.time.MonthDay;
import java.time.temporal.ChronoField;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.domain.AbstractAuditableWithUTCDateTimeCustom;
import org.apache.fineract.infrastructure.core.service.DateUtils;
import org.apache.fineract.infrastructure.core.service.MathUtil;
import org.apache.fineract.organisation.monetary.domain.MonetaryCurrency;
import org.apache.fineract.organisation.monetary.domain.Money;
import org.apache.fineract.organisation.monetary.domain.MoneyHelper;
import org.apache.fineract.portfolio.charge.domain.Charge;
import org.apache.fineract.portfolio.charge.domain.ChargeCalculationType;
import org.apache.fineract.portfolio.charge.domain.ChargeTimeType;
import org.apache.fineract.portfolio.charge.exception.SavingsAccountChargeWithoutMandatoryFieldException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * JPA entity for a charge applied to a savings account (table {@code m_savings_account_charge}).
 * <p>
 * Tracks the charge definition it was created from, when it is due (one-off, annual, monthly or
 * weekly recurrence), how its amount is calculated (flat or percentage-based), and the derived
 * paid / waived / written-off / outstanding amounts as transactions are applied or undone.
 *
 * @author dv6
 */
@Entity
@Table(name = "m_savings_account_charge")
public class SavingsAccountCharge extends AbstractAuditableWithUTCDateTimeCustom<Long> {
    private static final Logger LOG = LoggerFactory.getLogger(SavingsAccountCharge.class);
    // Owning savings account; mandatory.
    @ManyToOne(optional = false)
    @JoinColumn(name = "savings_account_id", referencedColumnName = "id", nullable = false)
    private SavingsAccount savingsAccount;
    // Product-level charge definition this account-level charge was derived from; mandatory.
    @ManyToOne(optional = false)
    @JoinColumn(name = "charge_id", referencedColumnName = "id", nullable = false)
    private Charge charge;
    // Enum ordinal of ChargeTimeType (when the charge applies: due date, activation, withdrawal, ...).
    @Column(name = "charge_time_enum", nullable = false)
    private Integer chargeTime;
    // Next/current due date; null for charge times that have no due date (e.g. withdrawal fees).
    @Column(name = "charge_due_date")
    private LocalDate dueDate;
    // For annual/monthly fees: month component of the recurring due day.
    @Column(name = "fee_on_month", nullable = true)
    private Integer feeOnMonth;
    // For annual/monthly fees: day-of-month; for weekly fees: ISO day-of-week (Monday=1).
    @Column(name = "fee_on_day", nullable = true)
    private Integer feeOnDay;
    // Recurrence interval (months for monthly fees, weeks for weekly fees).
    @Column(name = "fee_interval", nullable = true)
    private Integer feeInterval;
    // Enum ordinal of ChargeCalculationType (flat, percent-of-amount, ...).
    @Column(name = "charge_calculation_enum")
    private Integer chargeCalculation;
    // Running count of withdrawals used against the free-withdrawal allowance.
    @Column(name = "free_withdrawal_count", nullable = true)
    private Integer freeWithdrawalCount;
    // Date from which the free-withdrawal count was last reset.
    @Column(name = "charge_reset_date", nullable = true)
    private LocalDate chargeResetDate;
    // Percentage rate, populated only for percentage-based calculation types.
    @Column(name = "calculation_percentage", scale = 6, precision = 19, nullable = true)
    private BigDecimal percentage;
    // TODO AA: This field may not require for savings charges
    @Column(name = "calculation_on_amount", scale = 6, precision = 19, nullable = true)
    private BigDecimal amountPercentageAppliedTo;
    // The charge amount itself (flat amount, or the computed percentage of the applied-to amount).
    @Column(name = "amount", scale = 6, precision = 19, nullable = false)
    private BigDecimal amount;
    @Column(name = "amount_paid_derived", scale = 6, precision = 19, nullable = true)
    private BigDecimal amountPaid;
    @Column(name = "amount_waived_derived", scale = 6, precision = 19, nullable = true)
    private BigDecimal amountWaived;
    @Column(name = "amount_writtenoff_derived", scale = 6, precision = 19, nullable = true)
    private BigDecimal amountWrittenOff;
    // Derived: amount - (paid + waived + written off). Never null.
    @Column(name = "amount_outstanding_derived", scale = 6, precision = 19, nullable = false)
    private BigDecimal amountOutstanding;
    @Column(name = "is_penalty", nullable = false)
    private boolean penaltyCharge = false;
    @Column(name = "is_paid_derived", nullable = false)
    private boolean paid = false;
    @Column(name = "waived", nullable = false)
    private boolean waived = false;
    // Active flag ("is_active" column); inactivated charges are excluded from processing.
    @Column(name = "is_active", nullable = false)
    private boolean status = true;
    @Column(name = "inactivated_on_date")
    private LocalDate inactivationDate;
    /**
     * Builds a new charge for {@code savingsAccount} from the JSON command, falling back to the
     * charge definition for amount, fee month-day and fee interval when the command omits them.
     * Charge time and calculation type always come from the definition here (passed as null).
     */
    public static SavingsAccountCharge createNewFromJson(final SavingsAccount savingsAccount, final Charge chargeDefinition,
            final JsonCommand command) {
        BigDecimal amount = command.bigDecimalValueOfParameterNamed(amountParamName);
        final LocalDate dueDate = command.localDateValueOfParameterNamed(dueAsOfDateParamName);
        MonthDay feeOnMonthDay = command.extractMonthDayNamed(feeOnMonthDayParamName);
        Integer feeInterval = command.integerValueOfParameterNamed(feeIntervalParamName);
        final ChargeTimeType chargeTime = null;
        final ChargeCalculationType chargeCalculation = null;
        final boolean status = true;
        // If these values is not sent as parameter, then derive from Charge
        // definition
        amount = (amount == null) ? chargeDefinition.getAmount() : amount;
        feeOnMonthDay = (feeOnMonthDay == null) ? chargeDefinition.getFeeOnMonthDay() : feeOnMonthDay;
        feeInterval = (feeInterval == null) ? chargeDefinition.getFeeInterval() : feeInterval;
        return new SavingsAccountCharge(savingsAccount, chargeDefinition, amount, chargeTime, chargeCalculation, dueDate, status,
                feeOnMonthDay, feeInterval);
    }
    /**
     * Builds a charge that is not yet attached to a savings account (savingsAccount is null);
     * the account is expected to be set later via {@link #update(SavingsAccount)}.
     */
    public static SavingsAccountCharge createNewWithoutSavingsAccount(final Charge chargeDefinition, final BigDecimal amountPayable,
            final ChargeTimeType chargeTime, final ChargeCalculationType chargeCalculation, final LocalDate dueDate, final boolean status,
            final MonthDay feeOnMonthDay, final Integer feeInterval) {
        return new SavingsAccountCharge(null, chargeDefinition, amountPayable, chargeTime, chargeCalculation, dueDate, status,
                feeOnMonthDay, feeInterval);
    }
    // No-arg constructor required by JPA.
    protected SavingsAccountCharge() {
        //
    }
    /**
     * Full constructor: validates mandatory recurrence fields per charge-time type, resolves
     * defaults from the charge definition, and populates the derived amount fields.
     *
     * @throws SavingsAccountChargeWithoutMandatoryFieldException when a due date (or fee
     *         month-day for annual/monthly fees) is required for the charge time but missing
     */
    private SavingsAccountCharge(final SavingsAccount savingsAccount, final Charge chargeDefinition, final BigDecimal amount,
            final ChargeTimeType chargeTime, final ChargeCalculationType chargeCalculation, final LocalDate dueDate, final boolean status,
            MonthDay feeOnMonthDay, final Integer feeInterval) {
        this.savingsAccount = savingsAccount;
        this.charge = chargeDefinition;
        this.penaltyCharge = chargeDefinition.isPenalty();
        this.chargeTime = (chargeTime == null) ? chargeDefinition.getChargeTimeType() : chargeTime.getValue();
        if (isOnSpecifiedDueDate()) {
            if (dueDate == null) {
                final String defaultUserMessage = "Savings Account charge is missing due date.";
                throw new SavingsAccountChargeWithoutMandatoryFieldException("savingsaccount.charge", dueAsOfDateParamName,
                        defaultUserMessage, chargeDefinition.getId(), chargeDefinition.getName());
            }
        }
        if (isAnnualFee() || isMonthlyFee()) {
            feeOnMonthDay = (feeOnMonthDay == null) ? chargeDefinition.getFeeOnMonthDay() : feeOnMonthDay;
            // NOTE(review): the message/param here refer to the due date although the missing
            // value is the fee month-day — looks like a copy-paste; confirm intended wording.
            if (feeOnMonthDay == null) {
                final String defaultUserMessage = "Savings Account charge is missing due date.";
                throw new SavingsAccountChargeWithoutMandatoryFieldException("savingsaccount.charge", dueAsOfDateParamName,
                        defaultUserMessage, chargeDefinition.getId(), chargeDefinition.getName());
            }
            this.feeOnMonth = feeOnMonthDay.getMonthValue();
            this.feeOnDay = feeOnMonthDay.getDayOfMonth();
        } else if (isWeeklyFee()) {
            if (dueDate == null) {
                final String defaultUserMessage = "Savings Account charge is missing due date.";
                throw new SavingsAccountChargeWithoutMandatoryFieldException("savingsaccount.charge", dueAsOfDateParamName,
                        defaultUserMessage, chargeDefinition.getId(), chargeDefinition.getName());
            }
            /**
             * For Weekly fee feeOnDay is ISO standard day of the week. Monday=1, Tuesday=2
             */
            this.feeOnDay = dueDate.get(ChronoField.DAY_OF_WEEK);
        } else {
            this.feeOnDay = null;
            this.feeOnMonth = null;
            this.feeInterval = null;
        }
        if (isMonthlyFee() || isWeeklyFee()) {
            this.feeInterval = (feeInterval == null) ? chargeDefinition.feeInterval() : feeInterval;
        }
        this.dueDate = dueDate;
        // Calculation type defaults to the definition's; an explicit argument overrides it.
        this.chargeCalculation = chargeDefinition.getChargeCalculation();
        if (chargeCalculation != null) {
            this.chargeCalculation = chargeCalculation.getValue();
        }
        BigDecimal chargeAmount = chargeDefinition.getAmount();
        if (amount != null) {
            chargeAmount = amount;
        }
        // NOTE(review): new BigDecimal(0) — BigDecimal.ZERO would avoid the allocation.
        final BigDecimal transactionAmount = new BigDecimal(0);
        populateDerivedFields(transactionAmount, chargeAmount);
        // Withdrawal / no-activity fees accrue per transaction, so nothing is outstanding up front.
        if (this.isWithdrawalFee() || this.isSavingsNoActivity()) {
            this.amountOutstanding = BigDecimal.ZERO;
        }
        this.paid = determineIfFullyPaid();
        this.status = status;
    }
    /**
     * Re-arms a recurring (weekly/monthly/annual) fee for its next cycle: full amount becomes
     * outstanding again and paid/waived flags are cleared. No-op for non-recurring charges.
     */
    public void resetPropertiesForRecurringFees() {
        if (isMonthlyFee() || isAnnualFee() || isWeeklyFee()) {
            // FIXME: AA: If charge is percentage of x amount then need to
            // update amount outstanding accordingly.
            // Right now annual and monthly charges supports charge calculation
            // type flat.
            this.amountOutstanding = this.amount;
            this.paid = false;// reset to false for recurring fee.
            this.waived = false;
        }
    }
    /**
     * Initializes percentage/amount/outstanding fields according to the calculation type.
     * Only FLAT and PERCENT_OF_AMOUNT are effectively supported; the remaining types zero out
     * the outstanding amount.
     */
    private void populateDerivedFields(final BigDecimal transactionAmount, final BigDecimal chargeAmount) {
        switch (ChargeCalculationType.fromInt(this.chargeCalculation)) {
            case INVALID:
                this.percentage = null;
                this.amount = null;
                this.amountPercentageAppliedTo = null;
                this.amountPaid = null;
                this.amountOutstanding = BigDecimal.ZERO;
                this.amountWaived = null;
                this.amountWrittenOff = null;
            break;
            case FLAT:
                this.percentage = null;
                this.amount = chargeAmount;
                this.amountPercentageAppliedTo = null;
                this.amountPaid = null;
                this.amountOutstanding = chargeAmount;
                this.amountWaived = null;
                this.amountWrittenOff = null;
            break;
            case PERCENT_OF_AMOUNT:
                this.percentage = chargeAmount;
                this.amountPercentageAppliedTo = transactionAmount;
                this.amount = percentageOf(this.amountPercentageAppliedTo, this.percentage);
                this.amountPaid = null;
                this.amountOutstanding = calculateOutstanding();
                this.amountWaived = null;
                this.amountWrittenOff = null;
            break;
            case PERCENT_OF_AMOUNT_AND_INTEREST:
                this.percentage = null;
                this.amount = null;
                this.amountPercentageAppliedTo = null;
                this.amountPaid = null;
                this.amountOutstanding = BigDecimal.ZERO;
                this.amountWaived = null;
                this.amountWrittenOff = null;
            break;
            case PERCENT_OF_INTEREST:
                this.percentage = null;
                this.amount = null;
                this.amountPercentageAppliedTo = null;
                this.amountPaid = null;
                this.amountOutstanding = BigDecimal.ZERO;
                this.amountWaived = null;
                this.amountWrittenOff = null;
            break;
            case PERCENT_OF_DISBURSEMENT_AMOUNT:
                this.percentage = null;
                this.amount = null;
                this.amountPercentageAppliedTo = null;
                this.amountPaid = null;
                this.amountOutstanding = BigDecimal.ZERO;
                this.amountWaived = null;
                this.amountWrittenOff = null;
            break;
        }
    }
    /** Marks the full charge amount as paid and clears the outstanding balance. */
    public void markAsFullyPaid() {
        this.amountPaid = this.amount;
        this.amountOutstanding = BigDecimal.ZERO;
        this.paid = true;
    }
    /** Clears paid/waived/written-off amounts and recomputes outstanding from the base amount. */
    public void resetToOriginal(final MonetaryCurrency currency) {
        this.amountPaid = BigDecimal.ZERO;
        this.amountWaived = BigDecimal.ZERO;
        this.amountWrittenOff = BigDecimal.ZERO;
        this.amountOutstanding = calculateAmountOutstanding(currency);
        this.paid = false;
        this.waived = false;
    }
    /**
     * Reverses a payment of {@code transactionAmount}: reduces the paid amount, recomputes
     * outstanding, re-arms recurring fees and rolls the due date back one cycle.
     */
    public void undoPayment(final MonetaryCurrency currency, final Money transactionAmount) {
        Money amountPaid = getAmountPaid(currency);
        amountPaid = amountPaid.minus(transactionAmount);
        this.amountPaid = amountPaid.getAmount();
        this.amountOutstanding = calculateAmountOutstanding(currency);
        if (this.isWithdrawalFee()) {
            this.amountOutstanding = BigDecimal.ZERO;
        }
        // to reset amount outstanding for annual and monthly fee
        resetPropertiesForRecurringFees();
        updateToPreviousDueDate();// reset annual and monthly due date.
        this.paid = false;
        this.status = true;
    }
    /**
     * Waives the current outstanding balance and advances recurring fees to the next due date.
     *
     * @return the outstanding amount that was waived
     */
    public Money waive(final MonetaryCurrency currency) {
        Money amountWaivedToDate = Money.of(currency, this.amountWaived);
        Money amountOutstanding = Money.of(currency, this.amountOutstanding);
        this.amountWaived = amountWaivedToDate.plus(amountOutstanding).getAmount();
        this.amountOutstanding = BigDecimal.ZERO;
        this.waived = true;
        resetPropertiesForRecurringFees();
        updateNextDueDateForRecurringFees();
        return amountOutstanding;
    }
    /** Reverses a waiver of {@code transactionAmount}; mirror image of {@link #undoPayment}. */
    public void undoWaiver(final MonetaryCurrency currency, final Money transactionAmount) {
        Money amountWaived = getAmountWaived(currency);
        amountWaived = amountWaived.minus(transactionAmount);
        this.amountWaived = amountWaived.getAmount();
        this.amountOutstanding = calculateAmountOutstanding(currency);
        this.waived = false;
        this.status = true;
        resetPropertiesForRecurringFees();
        updateToPreviousDueDate();
    }
    /**
     * Applies a payment. When the outstanding balance reaches exactly zero, recurring fees roll
     * to their next due date and re-arm.
     *
     * @return the remaining outstanding amount after the payment
     */
    public Money pay(final MonetaryCurrency currency, final Money amountPaid) {
        Money amountPaidToDate = Money.of(currency, this.amountPaid);
        Money amountOutstanding = Money.of(currency, this.amountOutstanding);
        amountPaidToDate = amountPaidToDate.plus(amountPaid);
        amountOutstanding = amountOutstanding.minus(amountPaid);
        this.amountPaid = amountPaidToDate.getAmount();
        this.amountOutstanding = amountOutstanding.getAmount();
        this.paid = determineIfFullyPaid();
        if (BigDecimal.ZERO.compareTo(this.amountOutstanding) == 0) {
            // full outstanding is paid, update to next due date
            updateNextDueDateForRecurringFees();
            resetPropertiesForRecurringFees();
        }
        return Money.of(currency, this.amountOutstanding);
    }
    // outstanding = amount - waived - paid (written-off is not subtracted here, unlike calculateOutstanding()).
    private BigDecimal calculateAmountOutstanding(final MonetaryCurrency currency) {
        return getAmount(currency).minus(getAmountWaived(currency)).minus(getAmountPaid(currency)).getAmount();
    }
    /** Associates this charge with a savings account (used after createNewWithoutSavingsAccount). */
    public void update(final SavingsAccount savingsAccount) {
        this.savingsAccount = savingsAccount;
    }
    /**
     * Updates amount, due date, fee month-day and interval in place. Null parameters leave the
     * corresponding field untouched. Amount semantics depend on the calculation type (flat amount
     * vs. percentage rate).
     */
    public void update(final BigDecimal amount, final LocalDate dueDate, final MonthDay feeOnMonthDay, final Integer feeInterval) {
        final BigDecimal transactionAmount = BigDecimal.ZERO;
        if (dueDate != null) {
            this.dueDate = dueDate;
            if (isWeeklyFee()) {
                this.feeOnDay = dueDate.get(ChronoField.DAY_OF_WEEK);
            }
        }
        if (feeOnMonthDay != null) {
            this.feeOnMonth = feeOnMonthDay.getMonthValue();
            this.feeOnDay = feeOnMonthDay.getDayOfMonth();
        }
        if (feeInterval != null) {
            this.feeInterval = feeInterval;
        }
        if (amount != null) {
            switch (ChargeCalculationType.fromInt(this.chargeCalculation)) {
                case INVALID:
                break;
                case FLAT:
                    this.amount = amount;
                break;
                case PERCENT_OF_AMOUNT:
                    this.percentage = amount;
                    this.amountPercentageAppliedTo = transactionAmount;
                    this.amount = percentageOf(this.amountPercentageAppliedTo, this.percentage);
                    this.amountOutstanding = calculateOutstanding();
                break;
                case PERCENT_OF_AMOUNT_AND_INTEREST:
                    this.percentage = amount;
                    this.amount = null;
                    this.amountPercentageAppliedTo = null;
                    this.amountOutstanding = null;
                break;
                case PERCENT_OF_INTEREST:
                    this.percentage = amount;
                    this.amount = null;
                    this.amountPercentageAppliedTo = null;
                    this.amountOutstanding = null;
                break;
                case PERCENT_OF_DISBURSEMENT_AMOUNT:
                    LOG.error("TODO Implement update ChargeCalculationType for PERCENT_OF_DISBURSEMENT_AMOUNT");
                break;
            }
        }
    }
    /**
     * Applies changes from a JSON command and returns the map of fields actually changed
     * (parameter name -> new value, plus locale/date-format echoes), suitable for audit.
     * <p>
     * NOTE(review): in the PERCENT_OF_AMOUNT branch, {@code amountPercentageAppliedTo} is set to
     * null and then passed to {@code percentageOf}, which dereferences it — this is the known NPE
     * tracked by FINERACT-987 and deliberately suppressed below; do not "fix" silently.
     */
    @SuppressFBWarnings(value = "NP_NULL_PARAM_DEREF_NONVIRTUAL") // https://issues.apache.org/jira/browse/FINERACT-987
    public Map<String, Object> update(final JsonCommand command) {
        final Map<String, Object> actualChanges = new LinkedHashMap<>(7);
        final String dateFormatAsInput = command.dateFormat();
        final String localeAsInput = command.locale();
        if (command.isChangeInLocalDateParameterNamed(dueAsOfDateParamName, getDueDate())) {
            final String valueAsInput = command.stringValueOfParameterNamed(dueAsOfDateParamName);
            actualChanges.put(dueAsOfDateParamName, valueAsInput);
            actualChanges.put(dateFormatParamName, dateFormatAsInput);
            actualChanges.put(localeParamName, localeAsInput);
            this.dueDate = command.localDateValueOfParameterNamed(dueAsOfDateParamName);
            if (this.isWeeklyFee()) {
                this.feeOnDay = this.dueDate.get(ChronoField.DAY_OF_WEEK);
            }
        }
        if (command.hasParameter(feeOnMonthDayParamName)) {
            final MonthDay monthDay = command.extractMonthDayNamed(feeOnMonthDayParamName);
            final String actualValueEntered = command.stringValueOfParameterNamed(feeOnMonthDayParamName);
            final Integer dayOfMonthValue = monthDay.getDayOfMonth();
            if (!this.feeOnDay.equals(dayOfMonthValue)) {
                actualChanges.put(feeOnMonthDayParamName, actualValueEntered);
                actualChanges.put(localeParamName, localeAsInput);
                this.feeOnDay = dayOfMonthValue;
            }
            final Integer monthOfYear = monthDay.getMonthValue();
            if (!this.feeOnMonth.equals(monthOfYear)) {
                actualChanges.put(feeOnMonthDayParamName, actualValueEntered);
                actualChanges.put(localeParamName, localeAsInput);
                this.feeOnMonth = monthOfYear;
            }
        }
        if (command.isChangeInBigDecimalParameterNamed(amountParamName, this.amount)) {
            final BigDecimal newValue = command.bigDecimalValueOfParameterNamed(amountParamName);
            actualChanges.put(amountParamName, newValue);
            actualChanges.put(localeParamName, localeAsInput);
            switch (ChargeCalculationType.fromInt(this.chargeCalculation)) {
                case INVALID:
                break;
                case FLAT:
                    this.amount = newValue;
                    this.amountOutstanding = calculateOutstanding();
                break;
                case PERCENT_OF_AMOUNT:
                    this.percentage = newValue;
                    this.amountPercentageAppliedTo = null;
                    this.amount = percentageOf(this.amountPercentageAppliedTo, this.percentage);
                    this.amountOutstanding = calculateOutstanding();
                break;
                case PERCENT_OF_AMOUNT_AND_INTEREST:
                    this.percentage = newValue;
                    this.amount = null;
                    this.amountPercentageAppliedTo = null;
                    this.amountOutstanding = null;
                break;
                case PERCENT_OF_INTEREST:
                    this.percentage = newValue;
                    this.amount = null;
                    this.amountPercentageAppliedTo = null;
                    this.amountOutstanding = null;
                break;
                case PERCENT_OF_DISBURSEMENT_AMOUNT:
                    LOG.error("TODO Implement update ChargeCalculationType for PERCENT_OF_DISBURSEMENT_AMOUNT");
                break;
            }
        }
        return actualChanges;
    }
    private boolean isGreaterThanZero(final BigDecimal value) {
        return value.compareTo(BigDecimal.ZERO) > 0;
    }
    public LocalDate getDueDate() {
        return this.dueDate;
    }
    // Fully paid when the calculated outstanding is exactly zero (compareTo ignores scale).
    private boolean determineIfFullyPaid() {
        return BigDecimal.ZERO.compareTo(calculateOutstanding()) == 0;
    }
    /** outstanding = amount - (paid + waived + writtenOff), treating nulls as zero. */
    private BigDecimal calculateOutstanding() {
        BigDecimal amountPaidLocal = BigDecimal.ZERO;
        if (this.amountPaid != null) {
            amountPaidLocal = this.amountPaid;
        }
        BigDecimal amountWaivedLocal = BigDecimal.ZERO;
        if (this.amountWaived != null) {
            amountWaivedLocal = this.amountWaived;
        }
        BigDecimal amountWrittenOffLocal = BigDecimal.ZERO;
        if (this.amountWrittenOff != null) {
            amountWrittenOffLocal = this.amountWrittenOff;
        }
        final BigDecimal totalAccountedFor = amountPaidLocal.add(amountWaivedLocal).add(amountWrittenOffLocal);
        return this.amount.subtract(totalAccountedFor);
    }
    /**
     * Returns {@code value * percentage / 100} (8-digit precision, configured rounding mode),
     * or ZERO when {@code value} is not positive. Throws NPE when {@code value} is null — see the
     * FINERACT-987 suppression on {@link #update(JsonCommand)}.
     */
    private BigDecimal percentageOf(final BigDecimal value, final BigDecimal percentage) {
        BigDecimal percentageOf = BigDecimal.ZERO;
        if (isGreaterThanZero(value)) {
            final MathContext mc = new MathContext(8, MoneyHelper.getRoundingMode());
            final BigDecimal multiplicand = percentage.divide(BigDecimal.valueOf(100L), mc);
            percentageOf = value.multiply(multiplicand, mc);
        }
        return percentageOf;
    }
    public BigDecimal amount() {
        return this.amount;
    }
    // NOTE(review): method name misspelled ("amout") but part of the public API — renaming would break callers.
    public BigDecimal amoutOutstanding() {
        return this.amountOutstanding;
    }
    public boolean isFeeCharge() {
        return !this.penaltyCharge;
    }
    public boolean isPenaltyCharge() {
        return this.penaltyCharge;
    }
    public boolean isNotFullyPaid() {
        return !isPaid();
    }
    public boolean isPaid() {
        return this.paid;
    }
    public boolean isWaived() {
        return this.waived;
    }
    /** True when anything at all has been paid, waived or written off against this charge. */
    public boolean isPaidOrPartiallyPaid(final MonetaryCurrency currency) {
        final Money amountWaivedOrWrittenOff = getAmountWaived(currency).plus(getAmountWrittenOff(currency));
        return Money.of(currency, this.amountPaid).plus(amountWaivedOrWrittenOff).isGreaterThanZero();
    }
    public Money getAmount(final MonetaryCurrency currency) {
        return Money.of(currency, this.amount);
    }
    private Money getAmountPaid(final MonetaryCurrency currency) {
        return Money.of(currency, this.amountPaid);
    }
    public Money getAmountWaived(final MonetaryCurrency currency) {
        return Money.of(currency, this.amountWaived);
    }
    public Money getAmountWrittenOff(final MonetaryCurrency currency) {
        return Money.of(currency, this.amountWrittenOff);
    }
    public Money getAmountOutstanding(final MonetaryCurrency currency) {
        return Money.of(currency, this.amountOutstanding);
    }
    /**
     * @param incrementBy
     *            Amount used to pay off this charge
     * @return Actual amount paid on this charge
     */
    public Money updatePaidAmountBy(final Money incrementBy) {
        Money amountPaidToDate = Money.of(incrementBy.getCurrency(), this.amountPaid);
        final Money amountOutstanding = Money.of(incrementBy.getCurrency(), this.amountOutstanding);
        Money amountPaidOnThisCharge = Money.zero(incrementBy.getCurrency());
        if (incrementBy.isGreaterThanOrEqualTo(amountOutstanding)) {
            // Payment covers (or exceeds) the outstanding amount: consume only the outstanding part.
            amountPaidOnThisCharge = amountOutstanding;
            amountPaidToDate = amountPaidToDate.plus(amountOutstanding);
            this.amountPaid = amountPaidToDate.getAmount();
            this.amountOutstanding = BigDecimal.ZERO;
        } else {
            // Partial payment.
            amountPaidOnThisCharge = incrementBy;
            amountPaidToDate = amountPaidToDate.plus(incrementBy);
            this.amountPaid = amountPaidToDate.getAmount();
            final Money amountExpected = Money.of(incrementBy.getCurrency(), this.amount);
            this.amountOutstanding = amountExpected.minus(amountPaidToDate).getAmount();
        }
        this.paid = determineIfFullyPaid();
        return amountPaidOnThisCharge;
    }
    public String name() {
        return this.charge.getName();
    }
    public String currencyCode() {
        return this.charge.getCurrencyCode();
    }
    public Charge getCharge() {
        return this.charge;
    }
    public boolean isEnableFreeWithdrawal() {
        return charge.isEnableFreeWithdrawal();
    }
    public boolean isEnablePaymentType() {
        return charge.isEnablePaymentType();
    }
    public Integer getFrequencyFreeWithdrawalCharge() { // number of times free withdrawal allowed
        return charge.getFrequencyFreeWithdrawalCharge();
    }
    public Integer getRestartFrequency() { // numeric value of which numeric-period, count should restart
        return charge.getRestartFrequency();
    }
    public Integer getRestartFrequencyEnum() { // enum day/week/month for restarting the count.
        return charge.getRestartFrequencyEnum();
    }
    public Integer getFreeWithdrawalCount() {
        return freeWithdrawalCount;
    }
    public void setFreeWithdrawalCount(Integer freeWithdrawalCount) {
        this.freeWithdrawalCount = freeWithdrawalCount;
    }
    public LocalDate getResetChargeDate() {
        return chargeResetDate;
    }
    // NOTE(review): setter name says "discount due date" but it writes chargeResetDate — confirm naming.
    public void setDiscountDueDate(final LocalDate date) {
        this.chargeResetDate = date;
    }
    public SavingsAccount savingsAccount() {
        return this.savingsAccount;
    }
    public boolean isOnSpecifiedDueDate() {
        return ChargeTimeType.fromInt(this.chargeTime).isOnSpecifiedDueDate();
    }
    public boolean isSavingsActivation() {
        return ChargeTimeType.fromInt(this.chargeTime).isSavingsActivation();
    }
    public boolean isSavingsNoActivity() {
        return ChargeTimeType.fromInt(this.chargeTime).isSavingsNoActivityFee();
    }
    public boolean isSavingsClosure() {
        return ChargeTimeType.fromInt(this.chargeTime).isSavingsClosure();
    }
    public boolean isWithdrawalFee() {
        return ChargeTimeType.fromInt(this.chargeTime).isWithdrawalFee();
    }
    public boolean isOverdraftFee() {
        return ChargeTimeType.fromInt(this.chargeTime).isOverdraftFee();
    }
    public boolean isAnnualFee() {
        return ChargeTimeType.fromInt(this.chargeTime).isAnnualFee();
    }
    public boolean isMonthlyFee() {
        return ChargeTimeType.fromInt(this.chargeTime).isMonthlyFee();
    }
    public boolean isWeeklyFee() {
        return ChargeTimeType.fromInt(this.chargeTime).isWeeklyFee();
    }
    /** Case-insensitive currency match; false when either code is null. */
    public boolean hasCurrencyCodeOf(final String matchingCurrencyCode) {
        if (this.currencyCode() == null || matchingCurrencyCode == null) {
            return false;
        }
        return this.currencyCode().equalsIgnoreCase(matchingCurrencyCode);
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof SavingsAccountCharge)) {
            return false;
        }
        SavingsAccountCharge that = (SavingsAccountCharge) o;
        return (penaltyCharge == that.penaltyCharge) && (paid == that.paid) && (waived == that.waived) && (status == that.status)
                && Objects.equals(savingsAccount, that.savingsAccount) && Objects.equals(charge, that.charge)
                && Objects.equals(chargeTime, that.chargeTime) && DateUtils.isEqual(dueDate, that.dueDate)
                && Objects.equals(feeOnMonth, that.feeOnMonth) && Objects.equals(feeOnDay, that.feeOnDay)
                && Objects.equals(feeInterval, that.feeInterval) && Objects.equals(chargeCalculation, that.chargeCalculation)
                && Objects.equals(percentage, that.percentage) && Objects.equals(amountPercentageAppliedTo, that.amountPercentageAppliedTo)
                && Objects.equals(amount, that.amount) && Objects.equals(amountPaid, that.amountPaid)
                && Objects.equals(amountWaived, that.amountWaived) && Objects.equals(amountWrittenOff, that.amountWrittenOff)
                && Objects.equals(amountOutstanding, that.amountOutstanding) && DateUtils.isEqual(inactivationDate, that.inactivationDate);
    }
    @Override
    public int hashCode() {
        return Objects.hash(savingsAccount, charge, chargeTime, dueDate, feeOnMonth, feeOnDay, feeInterval, chargeCalculation, percentage,
                amountPercentageAppliedTo, amount, amountPaid, amountWaived, amountWrittenOff, amountOutstanding, penaltyCharge, paid,
                waived, status, inactivationDate);
    }
    /**
     * Computes the fee for a withdrawal of {@code transactionAmount}: the flat amount, or
     * transactionAmount * percentage / 100 for percent-of-amount charges; ZERO otherwise.
     * NOTE(review): "Withdral" is misspelled but the name is public API.
     */
    public BigDecimal calculateWithdralFeeAmount(@NotNull BigDecimal transactionAmount) {
        BigDecimal amountPaybale = BigDecimal.ZERO;
        if (ChargeCalculationType.fromInt(this.chargeCalculation).isFlat()) {
            amountPaybale = this.amount;
        } else if (ChargeCalculationType.fromInt(this.chargeCalculation).isPercentageOfAmount()) {
            amountPaybale = transactionAmount.multiply(this.percentage).divide(BigDecimal.valueOf(100L), MoneyHelper.getRoundingMode());
        }
        return amountPaybale;
    }
    /** Sets (and returns) the outstanding amount to the computed withdrawal fee. */
    public BigDecimal updateWithdralFeeAmount(final BigDecimal transactionAmount) {
        return amountOutstanding = calculateWithdralFeeAmount(transactionAmount);
    }
    /** Clears (and returns) the outstanding amount, e.g. for a free withdrawal. */
    public BigDecimal updateNoWithdrawalFee() {
        return amountOutstanding = BigDecimal.ZERO;
    }
    /** Rolls a recurring fee's due date forward to the first due date on/after {@code startingDate}. */
    public void updateToNextDueDateFrom(final LocalDate startingDate) {
        if (isAnnualFee() || isMonthlyFee() || isWeeklyFee()) {
            this.dueDate = getNextDueDateFrom(startingDate);
        }
    }
    /**
     * Computes (without mutating) the first due date on or after {@code startingDate}, stepping
     * by the fee's recurrence; for non-recurring charge times it delegates to
     * {@link #calculateNextDueDate(LocalDate)}, which returns null.
     */
    public LocalDate getNextDueDateFrom(final LocalDate startingDate) {
        LocalDate nextDueLocalDate = null;
        if (isAnnualFee() || isMonthlyFee()) {
            nextDueLocalDate = startingDate.withMonth(this.feeOnMonth);
            nextDueLocalDate = setDayOfMonth(nextDueLocalDate);
            while (DateUtils.isBefore(nextDueLocalDate, startingDate)) {
                nextDueLocalDate = calculateNextDueDate(nextDueLocalDate);
            }
        } else if (isWeeklyFee()) {
            nextDueLocalDate = getDueDate();
            while (DateUtils.isBefore(nextDueLocalDate, startingDate)) {
                nextDueLocalDate = calculateNextDueDate(nextDueLocalDate);
            }
        } else {
            nextDueLocalDate = calculateNextDueDate(startingDate);
        }
        return nextDueLocalDate;
    }
    // One recurrence step forward: +1 year (annual), +feeInterval months (monthly) or
    // +feeInterval weeks (weekly), snapped to the configured day. Null for other charge times.
    private LocalDate calculateNextDueDate(final LocalDate date) {
        LocalDate nextDueLocalDate = null;
        if (isAnnualFee()) {
            nextDueLocalDate = date.withMonth(this.feeOnMonth).plusYears(1);
            nextDueLocalDate = setDayOfMonth(nextDueLocalDate);
        } else if (isMonthlyFee()) {
            nextDueLocalDate = date.plusMonths(this.feeInterval);
            nextDueLocalDate = setDayOfMonth(nextDueLocalDate);
        } else if (isWeeklyFee()) {
            nextDueLocalDate = date.plusWeeks(this.feeInterval);
            nextDueLocalDate = setDayOfWeek(nextDueLocalDate);
        }
        return nextDueLocalDate;
    }
    // Snaps to feeOnDay, clamped to the month's last day (e.g. fee day 31 in February).
    private LocalDate setDayOfMonth(LocalDate nextDueLocalDate) {
        int maxDayOfMonth = nextDueLocalDate.lengthOfMonth();
        int newDayOfMonth = (this.feeOnDay.intValue() < maxDayOfMonth) ? this.feeOnDay : maxDayOfMonth;
        nextDueLocalDate = nextDueLocalDate.withDayOfMonth(newDayOfMonth);
        return nextDueLocalDate;
    }
    // Moves the date within its week to the configured ISO day-of-week if needed.
    private LocalDate setDayOfWeek(LocalDate nextDueLocalDate) {
        if (this.feeOnDay != nextDueLocalDate.get(ChronoField.DAY_OF_WEEK)) {
            nextDueLocalDate = nextDueLocalDate.with(ChronoField.DAY_OF_WEEK, this.feeOnDay);
        }
        return nextDueLocalDate;
    }
    /** Advances the due date by one recurrence cycle for annual/monthly/weekly fees. */
    public void updateNextDueDateForRecurringFees() {
        if (isAnnualFee() || isMonthlyFee() || isWeeklyFee()) {
            this.dueDate = calculateNextDueDate(this.dueDate);
        }
    }
    /** Rolls the due date back one recurrence cycle; inverse of updateNextDueDateForRecurringFees. */
    public void updateToPreviousDueDate() {
        if (isAnnualFee() || isMonthlyFee() || isWeeklyFee()) {
            LocalDate nextDueLocalDate = dueDate;
            if (isAnnualFee()) {
                nextDueLocalDate = nextDueLocalDate.withMonth(this.feeOnMonth).minusYears(1);
                nextDueLocalDate = setDayOfMonth(nextDueLocalDate);
            } else if (isMonthlyFee()) {
                nextDueLocalDate = nextDueLocalDate.minusMonths(this.feeInterval);
                nextDueLocalDate = setDayOfMonth(nextDueLocalDate);
            } else if (isWeeklyFee()) {
                nextDueLocalDate = nextDueLocalDate.minusDays(7 * this.feeInterval);
                nextDueLocalDate = setDayOfWeek(nextDueLocalDate);
            }
            this.dueDate = nextDueLocalDate;
        }
    }
    public boolean feeSettingsNotSet() {
        return !feeSettingsSet();
    }
    public boolean feeSettingsSet() {
        return this.feeOnDay != null && this.feeOnMonth != null;
    }
    public boolean isRecurringFee() {
        return isWeeklyFee() || isMonthlyFee() || isAnnualFee();
    }
    /** True when this charge's due date is strictly before {@code nextDueDate}. */
    public boolean isChargeIsDue(final LocalDate nextDueDate) {
        return DateUtils.isBefore(getDueDate(), nextDueDate);
    }
    /** True when the due date is past {@code nextDueDate} and something was already paid. */
    public boolean isChargeIsOverPaid(final LocalDate nextDueDate) {
        return DateUtils.isAfter(getDueDate(), nextDueDate) && MathUtil.isGreaterThanZero(amountPaid());
    }
    private BigDecimal amountPaid() {
        return this.amountPaid;
    }
    /**
     * Deactivates the charge: records the inactivation date, clears outstanding and marks paid.
     * NOTE(review): "inactiavate" is misspelled but the name is public API.
     */
    public void inactiavateCharge(final LocalDate inactivationOnDate) {
        this.inactivationDate = inactivationOnDate;
        this.status = false;
        this.amountOutstanding = BigDecimal.ZERO;
        this.paid = true;
    }
    public boolean isActive() {
        return this.status;
    }
    public boolean isNotActive() {
        return !isActive();
    }
    /**
     * This method is to identify the charges which can override the savings rules(for example if there is a minimum
     * enforced balance of 1000 on savings account with account balance of 1000, still these charges can be collected as
     * these charges are initiated by system and it can bring down the balance below the enforced minimum balance).
     *
     */
    public boolean canOverriteSavingAccountRules() {
        return (!this.isSavingsActivation() && !this.isWithdrawalFee());
    }
}
|
apache/kafka | 37,160 | clients/src/test/java/org/apache/kafka/common/security/oauthbearer/internals/expiring/ExpiringCredentialRefreshingLoginTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.expiring;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.internals.KafkaFutureImpl;
import org.apache.kafka.common.security.oauthbearer.internals.expiring.ExpiringCredentialRefreshingLogin.LoginContextFactory;
import org.apache.kafka.common.utils.MockScheduler;
import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;
import org.junit.jupiter.api.Test;
import org.mockito.InOrder;
import org.mockito.Mockito;
import org.mockito.internal.util.MockUtil;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import javax.security.auth.Subject;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class ExpiringCredentialRefreshingLoginTest {
    /*
     * A JAAS Configuration that returns an empty (non-null) entry array for every context
     * name, so LoginContext lookups succeed regardless of the name used in the tests.
     */
    private static final Configuration EMPTY_WILDCARD_CONFIGURATION;
    static {
        EMPTY_WILDCARD_CONFIGURATION = new Configuration() {
            @Override
            public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
                return new AppConfigurationEntry[0]; // match any name
            }
        };
    }
/*
* An ExpiringCredentialRefreshingLogin that we can tell explicitly to
* create/remove an expiring credential with specific
* create/expire/absoluteLastRefresh times
*/
    private static class TestExpiringCredentialRefreshingLogin extends ExpiringCredentialRefreshingLogin {
        // The credential currently exposed to the refresher thread via expiringCredential().
        private ExpiringCredential expiringCredential;
        // Staging slot used when login() precedes logout() (re-login-before-logout mode).
        private ExpiringCredential tmpExpiringCredential;
        private final Time time;
        private final long lifetimeMillis;
        private final long absoluteLastRefreshTimeMs;
        private final boolean clientReloginAllowedBeforeLogout;
        public TestExpiringCredentialRefreshingLogin(ExpiringCredentialRefreshConfig refreshConfig,
                LoginContextFactory loginContextFactory, Time time, final long lifetimeMillis,
                final long absoluteLastRefreshMs, boolean clientReloginAllowedBeforeLogout) {
            super("contextName", EMPTY_WILDCARD_CONFIGURATION, refreshConfig, null,
                    TestExpiringCredentialRefreshingLogin.class, loginContextFactory, Objects.requireNonNull(time));
            this.time = time;
            this.lifetimeMillis = lifetimeMillis;
            this.absoluteLastRefreshTimeMs = absoluteLastRefreshMs;
            this.clientReloginAllowedBeforeLogout = clientReloginAllowedBeforeLogout;
        }
        // Credential creation time is "now" according to the (mock) clock.
        public long getCreateMs() {
            return time.milliseconds();
        }
        // Credential expiration is a fixed lifetime beyond "now".
        public long getExpireTimeMs() {
            return time.milliseconds() + lifetimeMillis;
        }
        /*
         * Invoke at login time
         */
        public void createNewExpiringCredential() {
            if (!clientReloginAllowedBeforeLogout)
                /*
                 * Was preceded by logout
                 */
                expiringCredential = internalNewExpiringCredential();
            else {
                boolean initialLogin = expiringCredential == null;
                if (initialLogin)
                    // no logout immediately after the initial login
                    this.expiringCredential = internalNewExpiringCredential();
                else
                    /*
                     * This is at least the second invocation of login; we will move the credential
                     * over upon logout, which should be invoked next
                     */
                    this.tmpExpiringCredential = internalNewExpiringCredential();
            }
        }
        /*
         * Invoke at logout time
         */
        public void clearExpiringCredential() {
            if (!clientReloginAllowedBeforeLogout)
                /*
                 * Have not yet invoked login
                 */
                expiringCredential = null;
            else
                /*
                 * login has already been invoked
                 */
                expiringCredential = tmpExpiringCredential;
        }
        @Override
        public ExpiringCredential expiringCredential() {
            return expiringCredential;
        }
        // Builds a credential snapshotting create/expire times from the mock clock at call time.
        private ExpiringCredential internalNewExpiringCredential() {
            return new ExpiringCredential() {
                private final long createMs = getCreateMs();
                private final long expireTimeMs = getExpireTimeMs();
                @Override
                public String principalName() {
                    return "Created at " + new Date(createMs);
                }
                @Override
                public Long startTimeMs() {
                    return createMs;
                }
                @Override
                public long expireTimeMs() {
                    return expireTimeMs;
                }
                @Override
                public Long absoluteLastRefreshTimeMs() {
                    return absoluteLastRefreshTimeMs;
                }
                // useful in debugger
                @Override
                public String toString() {
                    return String.format("startTimeMs=%d, expireTimeMs=%d, absoluteLastRefreshTimeMs=%s", startTimeMs(),
                            expireTimeMs(), absoluteLastRefreshTimeMs());
                }
            };
        }
    }
/*
* A class that will forward all login/logout/getSubject() calls to a mock while
* also telling an instance of TestExpiringCredentialRefreshingLogin to
* create/remove an expiring credential upon login/logout(). Basically we are
* getting the functionality of a mock while simultaneously in the same method
* call performing creation/removal of expiring credentials.
*/
    private static class TestLoginContext extends LoginContext {
        // Receives createNewExpiringCredential()/clearExpiringCredential() callbacks on login/logout.
        private final TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin;
        // The Mockito mock that records/validates the login()/logout()/getSubject() interactions.
        private final LoginContext mockLoginContext;
        public TestLoginContext(TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin,
                LoginContext mockLoginContext) throws LoginException {
            super("contextName", null, null, EMPTY_WILDCARD_CONFIGURATION);
            this.testExpiringCredentialRefreshingLogin = Objects.requireNonNull(testExpiringCredentialRefreshingLogin);
            // sanity check to make sure it is likely a mock
            if (!MockUtil.isMock(mockLoginContext))
                throw new IllegalArgumentException();
            this.mockLoginContext = mockLoginContext;
        }
        @Override
        public void login() throws LoginException {
            /*
             * Here is where we get the functionality of a mock while simultaneously
             * performing the creation of an expiring credential
             */
            mockLoginContext.login();
            testExpiringCredentialRefreshingLogin.createNewExpiringCredential();
        }
        @Override
        public void logout() throws LoginException {
            /*
             * Here is where we get the functionality of a mock while simultaneously
             * performing the removal of an expiring credential
             */
            mockLoginContext.logout();
            testExpiringCredentialRefreshingLogin.clearExpiringCredential();
        }
        @Override
        public Subject getSubject() {
            // here we just need the functionality of a mock
            return mockLoginContext.getSubject();
        }
    }
/*
* An implementation of LoginContextFactory that returns an instance of
* TestLoginContext
*/
    private static class TestLoginContextFactory extends LoginContextFactory {
        // Completed when the refresher thread starts/finishes; lets tests synchronize on its lifecycle.
        private final KafkaFutureImpl<Object> refresherThreadStartedFuture = new KafkaFutureImpl<>();
        private final KafkaFutureImpl<Object> refresherThreadDoneFuture = new KafkaFutureImpl<>();
        private TestLoginContext testLoginContext;
        // Must be invoked before createLoginContext(); wires the mock and the login under test together.
        public void configure(LoginContext mockLoginContext,
                TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin) throws LoginException {
            // sanity check to make sure it is likely a mock
            if (!MockUtil.isMock(mockLoginContext))
                throw new IllegalArgumentException();
            this.testLoginContext = new TestLoginContext(Objects.requireNonNull(testExpiringCredentialRefreshingLogin),
                    mockLoginContext);
        }
        @Override
        public LoginContext createLoginContext(ExpiringCredentialRefreshingLogin expiringCredentialRefreshingLogin) throws LoginException {
            // Delegates to testLoginContext but tracks login success so that a logout without a
            // preceding successful login fails fast instead of silently proceeding.
            return new LoginContext("", null, null, EMPTY_WILDCARD_CONFIGURATION) {
                private boolean loginSuccess = false;
                @Override
                public void login() throws LoginException {
                    testLoginContext.login();
                    loginSuccess = true;
                }
                @Override
                public void logout() throws LoginException {
                    if (!loginSuccess)
                        // will cause the refresher thread to exit
                        throw new IllegalStateException("logout called without a successful login");
                    testLoginContext.logout();
                }
                @Override
                public Subject getSubject() {
                    return testLoginContext.getSubject();
                }
            };
        }
        @Override
        public void refresherThreadStarted() {
            refresherThreadStartedFuture.complete(null);
        }
        @Override
        public void refresherThreadDone() {
            refresherThreadDoneFuture.complete(null);
        }
        public Future<?> refresherThreadStartedFuture() {
            return refresherThreadStartedFuture;
        }
        public Future<?> refresherThreadDoneFuture() {
            return refresherThreadDoneFuture;
        }
    }
    @Test
    public void testRefresh() throws Exception {
        // Exercise 0, 1, and 2 expected refreshes in both re-login orderings.
        for (int numExpectedRefreshes : new int[] {0, 1, 2}) {
            for (boolean clientReloginAllowedBeforeLogout : new boolean[] {true, false}) {
                Subject subject = new Subject();
                final LoginContext mockLoginContext = mock(LoginContext.class);
                when(mockLoginContext.getSubject()).thenReturn(subject);
                MockTime mockTime = new MockTime();
                long startMs = mockTime.milliseconds();
                /*
                 * Identify the lifetime of each expiring credential
                 */
                long lifetimeMinutes = 100L;
                /*
                 * Identify the point at which refresh will occur in that lifetime
                 */
                long refreshEveryMinutes = 80L;
                /*
                 * Set an absolute last refresh time that will cause the login thread to exit
                 * after a certain number of re-logins (by adding an extra half of a refresh
                 * interval).
                 */
                long absoluteLastRefreshMs = startMs + (1 + numExpectedRefreshes) * 1000 * 60 * refreshEveryMinutes
                        - 1000 * 60 * refreshEveryMinutes / 2;
                /*
                 * Identify buffer time on either side for the refresh algorithm
                 */
                short minPeriodSeconds = (short) 0;
                short bufferSeconds = minPeriodSeconds;
                /*
                 * Define some listeners so we can keep track of who gets done and when. All
                 * added listeners should end up done except the last, extra one, which should
                 * not.
                 */
                MockScheduler mockScheduler = new MockScheduler(mockTime);
                List<KafkaFutureImpl<Long>> waiters = addWaiters(mockScheduler, 1000 * 60 * refreshEveryMinutes,
                        numExpectedRefreshes + 1);
                // Create the ExpiringCredentialRefreshingLogin instance under test
                TestLoginContextFactory testLoginContextFactory = new TestLoginContextFactory();
                TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin = new TestExpiringCredentialRefreshingLogin(
                        refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
                                1.0 * refreshEveryMinutes / lifetimeMinutes, minPeriodSeconds, bufferSeconds,
                                clientReloginAllowedBeforeLogout),
                        testLoginContextFactory, mockTime, 1000 * 60 * lifetimeMinutes, absoluteLastRefreshMs,
                        clientReloginAllowedBeforeLogout);
                testLoginContextFactory.configure(mockLoginContext, testExpiringCredentialRefreshingLogin);
                /*
                 * Perform the login, wait up to a certain amount of time for the refresher
                 * thread to exit, and make sure the correct calls happened at the correct times
                 */
                long expectedFinalMs = startMs + numExpectedRefreshes * 1000 * 60 * refreshEveryMinutes;
                assertFalse(testLoginContextFactory.refresherThreadStartedFuture().isDone());
                assertFalse(testLoginContextFactory.refresherThreadDoneFuture().isDone());
                testExpiringCredentialRefreshingLogin.login();
                assertTrue(testLoginContextFactory.refresherThreadStartedFuture().isDone());
                testLoginContextFactory.refresherThreadDoneFuture().get(1L, TimeUnit.SECONDS);
                assertEquals(expectedFinalMs, mockTime.milliseconds());
                // Each waiter except the extra, final one should have completed at its refresh tick.
                for (int i = 0; i < numExpectedRefreshes; ++i) {
                    KafkaFutureImpl<Long> waiter = waiters.get(i);
                    assertTrue(waiter.isDone());
                    assertEquals((i + 1) * 1000 * 60 * refreshEveryMinutes, waiter.get() - startMs);
                }
                assertFalse(waiters.get(numExpectedRefreshes).isDone());
                /*
                 * We expect login() to be invoked followed by getSubject() and then ultimately followed by
                 * numExpectedRefreshes pairs of either login()/logout() or logout()/login() calls
                 */
                InOrder inOrder = inOrder(mockLoginContext);
                inOrder.verify(mockLoginContext).login();
                inOrder.verify(mockLoginContext).getSubject();
                for (int i = 0; i < numExpectedRefreshes; ++i) {
                    if (clientReloginAllowedBeforeLogout) {
                        inOrder.verify(mockLoginContext).login();
                        inOrder.verify(mockLoginContext).logout();
                    } else {
                        inOrder.verify(mockLoginContext).logout();
                        inOrder.verify(mockLoginContext).login();
                    }
                }
                testExpiringCredentialRefreshingLogin.close();
            }
        }
    }
@Test
public void testRefreshWithExpirationSmallerThanConfiguredBuffers() throws Exception {
int numExpectedRefreshes = 1;
boolean clientReloginAllowedBeforeLogout = true;
final LoginContext mockLoginContext = mock(LoginContext.class);
Subject subject = new Subject();
when(mockLoginContext.getSubject()).thenReturn(subject);
MockTime mockTime = new MockTime();
long startMs = mockTime.milliseconds();
/*
* Identify the lifetime of each expiring credential
*/
long lifetimeMinutes = 10L;
/*
* Identify the point at which refresh will occur in that lifetime
*/
long refreshEveryMinutes = 8L;
/*
* Set an absolute last refresh time that will cause the login thread to exit
* after a certain number of re-logins (by adding an extra half of a refresh
* interval).
*/
long absoluteLastRefreshMs = startMs + (1 + numExpectedRefreshes) * 1000 * 60 * refreshEveryMinutes
- 1000 * 60 * refreshEveryMinutes / 2;
/*
* Identify buffer time on either side for the refresh algorithm that will cause
* the entire lifetime to be taken up. In other words, make sure there is no way
* to honor the buffers.
*/
short minPeriodSeconds = (short) (1 + lifetimeMinutes * 60 / 2);
short bufferSeconds = minPeriodSeconds;
/*
* Define some listeners so we can keep track of who gets done and when. All
* added listeners should end up done except the last, extra one, which should
* not.
*/
MockScheduler mockScheduler = new MockScheduler(mockTime);
List<KafkaFutureImpl<Long>> waiters = addWaiters(mockScheduler, 1000 * 60 * refreshEveryMinutes,
numExpectedRefreshes + 1);
// Create the ExpiringCredentialRefreshingLogin instance under test
TestLoginContextFactory testLoginContextFactory = new TestLoginContextFactory();
TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin = new TestExpiringCredentialRefreshingLogin(
refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
1.0 * refreshEveryMinutes / lifetimeMinutes, minPeriodSeconds, bufferSeconds,
clientReloginAllowedBeforeLogout),
testLoginContextFactory, mockTime, 1000 * 60 * lifetimeMinutes, absoluteLastRefreshMs,
clientReloginAllowedBeforeLogout);
testLoginContextFactory.configure(mockLoginContext, testExpiringCredentialRefreshingLogin);
/*
* Perform the login, wait up to a certain amount of time for the refresher
* thread to exit, and make sure the correct calls happened at the correct times
*/
long expectedFinalMs = startMs + numExpectedRefreshes * 1000 * 60 * refreshEveryMinutes;
assertFalse(testLoginContextFactory.refresherThreadStartedFuture().isDone());
assertFalse(testLoginContextFactory.refresherThreadDoneFuture().isDone());
testExpiringCredentialRefreshingLogin.login();
assertTrue(testLoginContextFactory.refresherThreadStartedFuture().isDone());
testLoginContextFactory.refresherThreadDoneFuture().get(1L, TimeUnit.SECONDS);
assertEquals(expectedFinalMs, mockTime.milliseconds());
for (int i = 0; i < numExpectedRefreshes; ++i) {
KafkaFutureImpl<Long> waiter = waiters.get(i);
assertTrue(waiter.isDone());
assertEquals((i + 1) * 1000 * 60 * refreshEveryMinutes, waiter.get() - startMs);
}
assertFalse(waiters.get(numExpectedRefreshes).isDone());
InOrder inOrder = inOrder(mockLoginContext);
inOrder.verify(mockLoginContext).login();
for (int i = 0; i < numExpectedRefreshes; ++i) {
inOrder.verify(mockLoginContext).login();
inOrder.verify(mockLoginContext).logout();
}
}
@Test
public void testRefreshWithExpirationSmallerThanConfiguredBuffersAndOlderCreateTime() throws Exception {
int numExpectedRefreshes = 1;
boolean clientReloginAllowedBeforeLogout = true;
final LoginContext mockLoginContext = mock(LoginContext.class);
Subject subject = new Subject();
when(mockLoginContext.getSubject()).thenReturn(subject);
MockTime mockTime = new MockTime();
long startMs = mockTime.milliseconds();
/*
* Identify the lifetime of each expiring credential
*/
long lifetimeMinutes = 10L;
/*
* Identify the point at which refresh will occur in that lifetime
*/
long refreshEveryMinutes = 8L;
/*
* Set an absolute last refresh time that will cause the login thread to exit
* after a certain number of re-logins (by adding an extra half of a refresh
* interval).
*/
long absoluteLastRefreshMs = startMs + (1 + numExpectedRefreshes) * 1000 * 60 * refreshEveryMinutes
- 1000 * 60 * refreshEveryMinutes / 2;
/*
* Identify buffer time on either side for the refresh algorithm that will cause
* the entire lifetime to be taken up. In other words, make sure there is no way
* to honor the buffers.
*/
short minPeriodSeconds = (short) (1 + lifetimeMinutes * 60 / 2);
short bufferSeconds = minPeriodSeconds;
/*
* Define some listeners so we can keep track of who gets done and when. All
* added listeners should end up done except the last, extra one, which should
* not.
*/
MockScheduler mockScheduler = new MockScheduler(mockTime);
List<KafkaFutureImpl<Long>> waiters = addWaiters(mockScheduler, 1000 * 60 * refreshEveryMinutes,
numExpectedRefreshes + 1);
// Create the ExpiringCredentialRefreshingLogin instance under test
TestLoginContextFactory testLoginContextFactory = new TestLoginContextFactory();
TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin = new TestExpiringCredentialRefreshingLogin(
refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
1.0 * refreshEveryMinutes / lifetimeMinutes, minPeriodSeconds, bufferSeconds,
clientReloginAllowedBeforeLogout),
testLoginContextFactory, mockTime, 1000 * 60 * lifetimeMinutes, absoluteLastRefreshMs,
clientReloginAllowedBeforeLogout) {
@Override
public long getCreateMs() {
return super.getCreateMs() - 1000 * 60 * 60; // distant past
}
};
testLoginContextFactory.configure(mockLoginContext, testExpiringCredentialRefreshingLogin);
/*
* Perform the login, wait up to a certain amount of time for the refresher
* thread to exit, and make sure the correct calls happened at the correct times
*/
long expectedFinalMs = startMs + numExpectedRefreshes * 1000 * 60 * refreshEveryMinutes;
assertFalse(testLoginContextFactory.refresherThreadStartedFuture().isDone());
assertFalse(testLoginContextFactory.refresherThreadDoneFuture().isDone());
testExpiringCredentialRefreshingLogin.login();
assertTrue(testLoginContextFactory.refresherThreadStartedFuture().isDone());
testLoginContextFactory.refresherThreadDoneFuture().get(1L, TimeUnit.SECONDS);
assertEquals(expectedFinalMs, mockTime.milliseconds());
for (int i = 0; i < numExpectedRefreshes; ++i) {
KafkaFutureImpl<Long> waiter = waiters.get(i);
assertTrue(waiter.isDone());
assertEquals((i + 1) * 1000 * 60 * refreshEveryMinutes, waiter.get() - startMs);
}
assertFalse(waiters.get(numExpectedRefreshes).isDone());
InOrder inOrder = inOrder(mockLoginContext);
inOrder.verify(mockLoginContext).login();
for (int i = 0; i < numExpectedRefreshes; ++i) {
inOrder.verify(mockLoginContext).login();
inOrder.verify(mockLoginContext).logout();
}
}
@Test
public void testRefreshWithMinPeriodIntrusion() throws Exception {
int numExpectedRefreshes = 1;
boolean clientReloginAllowedBeforeLogout = true;
Subject subject = new Subject();
final LoginContext mockLoginContext = mock(LoginContext.class);
when(mockLoginContext.getSubject()).thenReturn(subject);
MockTime mockTime = new MockTime();
long startMs = mockTime.milliseconds();
/*
* Identify the lifetime of each expiring credential
*/
long lifetimeMinutes = 10L;
/*
* Identify the point at which refresh will occur in that lifetime
*/
long refreshEveryMinutes = 8L;
/*
* Set an absolute last refresh time that will cause the login thread to exit
* after a certain number of re-logins (by adding an extra half of a refresh
* interval).
*/
long absoluteLastRefreshMs = startMs + (1 + numExpectedRefreshes) * 1000 * 60 * refreshEveryMinutes
- 1000 * 60 * refreshEveryMinutes / 2;
/*
* Identify a minimum period that will cause the refresh time to be delayed a
* bit.
*/
int bufferIntrusionSeconds = 1;
short minPeriodSeconds = (short) (refreshEveryMinutes * 60 + bufferIntrusionSeconds);
short bufferSeconds = (short) 0;
/*
* Define some listeners so we can keep track of who gets done and when. All
* added listeners should end up done except the last, extra one, which should
* not.
*/
MockScheduler mockScheduler = new MockScheduler(mockTime);
List<KafkaFutureImpl<Long>> waiters = addWaiters(mockScheduler,
1000 * (60 * refreshEveryMinutes + bufferIntrusionSeconds), numExpectedRefreshes + 1);
// Create the ExpiringCredentialRefreshingLogin instance under test
TestLoginContextFactory testLoginContextFactory = new TestLoginContextFactory();
TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin = new TestExpiringCredentialRefreshingLogin(
refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
1.0 * refreshEveryMinutes / lifetimeMinutes, minPeriodSeconds, bufferSeconds,
clientReloginAllowedBeforeLogout),
testLoginContextFactory, mockTime, 1000 * 60 * lifetimeMinutes, absoluteLastRefreshMs,
clientReloginAllowedBeforeLogout);
testLoginContextFactory.configure(mockLoginContext, testExpiringCredentialRefreshingLogin);
/*
* Perform the login, wait up to a certain amount of time for the refresher
* thread to exit, and make sure the correct calls happened at the correct times
*/
long expectedFinalMs = startMs
+ numExpectedRefreshes * 1000 * (60 * refreshEveryMinutes + bufferIntrusionSeconds);
assertFalse(testLoginContextFactory.refresherThreadStartedFuture().isDone());
assertFalse(testLoginContextFactory.refresherThreadDoneFuture().isDone());
testExpiringCredentialRefreshingLogin.login();
assertTrue(testLoginContextFactory.refresherThreadStartedFuture().isDone());
testLoginContextFactory.refresherThreadDoneFuture().get(1L, TimeUnit.SECONDS);
assertEquals(expectedFinalMs, mockTime.milliseconds());
for (int i = 0; i < numExpectedRefreshes; ++i) {
KafkaFutureImpl<Long> waiter = waiters.get(i);
assertTrue(waiter.isDone());
assertEquals((i + 1) * 1000 * (60 * refreshEveryMinutes + bufferIntrusionSeconds),
waiter.get() - startMs);
}
assertFalse(waiters.get(numExpectedRefreshes).isDone());
InOrder inOrder = inOrder(mockLoginContext);
inOrder.verify(mockLoginContext).login();
for (int i = 0; i < numExpectedRefreshes; ++i) {
inOrder.verify(mockLoginContext).login();
inOrder.verify(mockLoginContext).logout();
}
}
@Test
public void testRefreshWithPreExpirationBufferIntrusion() throws Exception {
int numExpectedRefreshes = 1;
boolean clientReloginAllowedBeforeLogout = true;
Subject subject = new Subject();
final LoginContext mockLoginContext = mock(LoginContext.class);
when(mockLoginContext.getSubject()).thenReturn(subject);
MockTime mockTime = new MockTime();
long startMs = mockTime.milliseconds();
/*
* Identify the lifetime of each expiring credential
*/
long lifetimeMinutes = 10L;
/*
* Identify the point at which refresh will occur in that lifetime
*/
long refreshEveryMinutes = 8L;
/*
* Set an absolute last refresh time that will cause the login thread to exit
* after a certain number of re-logins (by adding an extra half of a refresh
* interval).
*/
long absoluteLastRefreshMs = startMs + (1 + numExpectedRefreshes) * 1000 * 60 * refreshEveryMinutes
- 1000 * 60 * refreshEveryMinutes / 2;
/*
* Identify a minimum period that will cause the refresh time to be delayed a
* bit.
*/
int bufferIntrusionSeconds = 1;
short bufferSeconds = (short) ((lifetimeMinutes - refreshEveryMinutes) * 60 + bufferIntrusionSeconds);
short minPeriodSeconds = (short) 0;
/*
* Define some listeners so we can keep track of who gets done and when. All
* added listeners should end up done except the last, extra one, which should
* not.
*/
MockScheduler mockScheduler = new MockScheduler(mockTime);
List<KafkaFutureImpl<Long>> waiters = addWaiters(mockScheduler,
1000 * (60 * refreshEveryMinutes - bufferIntrusionSeconds), numExpectedRefreshes + 1);
// Create the ExpiringCredentialRefreshingLogin instance under test
TestLoginContextFactory testLoginContextFactory = new TestLoginContextFactory();
TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin = new TestExpiringCredentialRefreshingLogin(
refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
1.0 * refreshEveryMinutes / lifetimeMinutes, minPeriodSeconds, bufferSeconds,
clientReloginAllowedBeforeLogout),
testLoginContextFactory, mockTime, 1000 * 60 * lifetimeMinutes, absoluteLastRefreshMs,
clientReloginAllowedBeforeLogout);
testLoginContextFactory.configure(mockLoginContext, testExpiringCredentialRefreshingLogin);
/*
* Perform the login, wait up to a certain amount of time for the refresher
* thread to exit, and make sure the correct calls happened at the correct times
*/
long expectedFinalMs = startMs
+ numExpectedRefreshes * 1000 * (60 * refreshEveryMinutes - bufferIntrusionSeconds);
assertFalse(testLoginContextFactory.refresherThreadStartedFuture().isDone());
assertFalse(testLoginContextFactory.refresherThreadDoneFuture().isDone());
testExpiringCredentialRefreshingLogin.login();
assertTrue(testLoginContextFactory.refresherThreadStartedFuture().isDone());
testLoginContextFactory.refresherThreadDoneFuture().get(1L, TimeUnit.SECONDS);
assertEquals(expectedFinalMs, mockTime.milliseconds());
for (int i = 0; i < numExpectedRefreshes; ++i) {
KafkaFutureImpl<Long> waiter = waiters.get(i);
assertTrue(waiter.isDone());
assertEquals((i + 1) * 1000 * (60 * refreshEveryMinutes - bufferIntrusionSeconds),
waiter.get() - startMs);
}
assertFalse(waiters.get(numExpectedRefreshes).isDone());
InOrder inOrder = inOrder(mockLoginContext);
inOrder.verify(mockLoginContext).login();
for (int i = 0; i < numExpectedRefreshes; ++i) {
inOrder.verify(mockLoginContext).login();
inOrder.verify(mockLoginContext).logout();
}
}
@Test
public void testLoginExceptionCausesCorrectLogout() throws Exception {
int numExpectedRefreshes = 3;
boolean clientReloginAllowedBeforeLogout = true;
Subject subject = new Subject();
final LoginContext mockLoginContext = mock(LoginContext.class);
when(mockLoginContext.getSubject()).thenReturn(subject);
Mockito.doNothing().doThrow(new LoginException()).doNothing().when(mockLoginContext).login();
MockTime mockTime = new MockTime();
long startMs = mockTime.milliseconds();
/*
* Identify the lifetime of each expiring credential
*/
long lifetimeMinutes = 100L;
/*
* Identify the point at which refresh will occur in that lifetime
*/
long refreshEveryMinutes = 80L;
/*
* Set an absolute last refresh time that will cause the login thread to exit
* after a certain number of re-logins (by adding an extra half of a refresh
* interval).
*/
long absoluteLastRefreshMs = startMs + (1 + numExpectedRefreshes) * 1000 * 60 * refreshEveryMinutes
- 1000 * 60 * refreshEveryMinutes / 2;
/*
* Identify buffer time on either side for the refresh algorithm
*/
short minPeriodSeconds = (short) 0;
short bufferSeconds = minPeriodSeconds;
// Create the ExpiringCredentialRefreshingLogin instance under test
TestLoginContextFactory testLoginContextFactory = new TestLoginContextFactory();
TestExpiringCredentialRefreshingLogin testExpiringCredentialRefreshingLogin = new TestExpiringCredentialRefreshingLogin(
refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
1.0 * refreshEveryMinutes / lifetimeMinutes, minPeriodSeconds, bufferSeconds,
clientReloginAllowedBeforeLogout),
testLoginContextFactory, mockTime, 1000 * 60 * lifetimeMinutes, absoluteLastRefreshMs,
clientReloginAllowedBeforeLogout);
testLoginContextFactory.configure(mockLoginContext, testExpiringCredentialRefreshingLogin);
/*
* Perform the login and wait up to a certain amount of time for the refresher
* thread to exit. A timeout indicates the thread died due to logout()
* being invoked on an instance where the login() invocation had failed.
*/
assertFalse(testLoginContextFactory.refresherThreadStartedFuture().isDone());
assertFalse(testLoginContextFactory.refresherThreadDoneFuture().isDone());
testExpiringCredentialRefreshingLogin.login();
assertTrue(testLoginContextFactory.refresherThreadStartedFuture().isDone());
testLoginContextFactory.refresherThreadDoneFuture().get(1L, TimeUnit.SECONDS);
}
private static List<KafkaFutureImpl<Long>> addWaiters(MockScheduler mockScheduler, long refreshEveryMillis,
int numWaiters) {
List<KafkaFutureImpl<Long>> retvalWaiters = new ArrayList<>(numWaiters);
for (int i = 1; i <= numWaiters; ++i) {
KafkaFutureImpl<Long> waiter = new KafkaFutureImpl<>();
mockScheduler.addWaiter(i * refreshEveryMillis, waiter);
retvalWaiters.add(waiter);
}
return retvalWaiters;
}
private static ExpiringCredentialRefreshConfig refreshConfigThatPerformsReloginEveryGivenPercentageOfLifetime(
double refreshWindowFactor, short minPeriodSeconds, short bufferSeconds,
boolean clientReloginAllowedBeforeLogout) {
Map<Object, Object> configs = new HashMap<>();
configs.put(SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_FACTOR, refreshWindowFactor);
configs.put(SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_JITTER, 0);
configs.put(SaslConfigs.SASL_LOGIN_REFRESH_MIN_PERIOD_SECONDS, minPeriodSeconds);
configs.put(SaslConfigs.SASL_LOGIN_REFRESH_BUFFER_SECONDS, bufferSeconds);
return new ExpiringCredentialRefreshConfig(new ConfigDef().withClientSaslSupport().parse(configs),
clientReloginAllowedBeforeLogout);
}
}
|
googleapis/google-cloud-java | 36,615 | java-modelarmor/proto-google-cloud-modelarmor-v1/src/main/java/com/google/cloud/modelarmor/v1/VirusDetail.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/modelarmor/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.modelarmor.v1;
/**
*
*
* <pre>
* Details of an identified virus
* </pre>
*
* Protobuf type {@code google.cloud.modelarmor.v1.VirusDetail}
*/
public final class VirusDetail extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.modelarmor.v1.VirusDetail)
VirusDetailOrBuilder {
private static final long serialVersionUID = 0L;
// Use VirusDetail.newBuilder() to construct.
  // Builder-based constructor invoked by Builder.build(); state comes from the builder.
  private VirusDetail(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: initializes fields to their defaults (empty vendor, empty names list,
  // threat type number 0).
  private VirusDetail() {
    vendor_ = "";
    names_ = com.google.protobuf.LazyStringArrayList.emptyList();
    threatType_ = 0;
  }
  // Used by the protobuf runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new VirusDetail();
  }
  // Returns the message descriptor generated from google/cloud/modelarmor/v1/service.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.modelarmor.v1.V1mainProto
        .internal_static_google_cloud_modelarmor_v1_VirusDetail_descriptor;
  }
  // Links this message class and its Builder to the reflection-based field accessors.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.modelarmor.v1.V1mainProto
        .internal_static_google_cloud_modelarmor_v1_VirusDetail_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.modelarmor.v1.VirusDetail.class,
            com.google.cloud.modelarmor.v1.VirusDetail.Builder.class);
  }
/**
 *
 *
 * <pre>
 * Defines all the threat types of a virus
 * </pre>
 *
 * Protobuf enum {@code google.cloud.modelarmor.v1.VirusDetail.ThreatType}
 */
public enum ThreatType implements com.google.protobuf.ProtocolMessageEnum {
/**
 *
 *
 * <pre>
 * Unused
 * </pre>
 *
 * <code>THREAT_TYPE_UNSPECIFIED = 0;</code>
 */
THREAT_TYPE_UNSPECIFIED(0),
/**
 *
 *
 * <pre>
 * Unable to categorize threat
 * </pre>
 *
 * <code>UNKNOWN = 1;</code>
 */
UNKNOWN(1),
/**
 *
 *
 * <pre>
 * Virus or Worm threat.
 * </pre>
 *
 * <code>VIRUS_OR_WORM = 2;</code>
 */
VIRUS_OR_WORM(2),
/**
 *
 *
 * <pre>
 * Malicious program. E.g. Spyware, Trojan.
 * </pre>
 *
 * <code>MALICIOUS_PROGRAM = 3;</code>
 */
MALICIOUS_PROGRAM(3),
/**
 *
 *
 * <pre>
 * Potentially harmful content. E.g. Injected code, Macro
 * </pre>
 *
 * <code>POTENTIALLY_HARMFUL_CONTENT = 4;</code>
 */
POTENTIALLY_HARMFUL_CONTENT(4),
/**
 *
 *
 * <pre>
 * Potentially unwanted content. E.g. Adware.
 * </pre>
 *
 * <code>POTENTIALLY_UNWANTED_CONTENT = 5;</code>
 */
POTENTIALLY_UNWANTED_CONTENT(5),
// Sentinel for wire values not known to this generated code version;
// it has no wire number of its own (getNumber() throws for it).
UNRECOGNIZED(-1),
;
/**
 *
 *
 * <pre>
 * Unused
 * </pre>
 *
 * <code>THREAT_TYPE_UNSPECIFIED = 0;</code>
 */
public static final int THREAT_TYPE_UNSPECIFIED_VALUE = 0;
/**
 *
 *
 * <pre>
 * Unable to categorize threat
 * </pre>
 *
 * <code>UNKNOWN = 1;</code>
 */
public static final int UNKNOWN_VALUE = 1;
/**
 *
 *
 * <pre>
 * Virus or Worm threat.
 * </pre>
 *
 * <code>VIRUS_OR_WORM = 2;</code>
 */
public static final int VIRUS_OR_WORM_VALUE = 2;
/**
 *
 *
 * <pre>
 * Malicious program. E.g. Spyware, Trojan.
 * </pre>
 *
 * <code>MALICIOUS_PROGRAM = 3;</code>
 */
public static final int MALICIOUS_PROGRAM_VALUE = 3;
/**
 *
 *
 * <pre>
 * Potentially harmful content. E.g. Injected code, Macro
 * </pre>
 *
 * <code>POTENTIALLY_HARMFUL_CONTENT = 4;</code>
 */
public static final int POTENTIALLY_HARMFUL_CONTENT_VALUE = 4;
/**
 *
 *
 * <pre>
 * Potentially unwanted content. E.g. Adware.
 * </pre>
 *
 * <code>POTENTIALLY_UNWANTED_CONTENT = 5;</code>
 */
public static final int POTENTIALLY_UNWANTED_CONTENT_VALUE = 5;
// Returns the proto wire number; UNRECOGNIZED deliberately has none.
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value.
 * @deprecated Use {@link #forNumber(int)} instead.
 */
@java.lang.Deprecated
public static ThreatType valueOf(int value) {
return forNumber(value);
}
/**
 * @param value The numeric wire value of the corresponding enum entry.
 * @return The enum associated with the given numeric wire value, or null if unknown.
 */
public static ThreatType forNumber(int value) {
switch (value) {
case 0:
return THREAT_TYPE_UNSPECIFIED;
case 1:
return UNKNOWN;
case 2:
return VIRUS_OR_WORM;
case 3:
return MALICIOUS_PROGRAM;
case 4:
return POTENTIALLY_HARMFUL_CONTENT;
case 5:
return POTENTIALLY_UNWANTED_CONTENT;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ThreatType> internalGetValueMap() {
return internalValueMap;
}
// Number-to-enum lookup used by the lite runtime; delegates to forNumber.
private static final com.google.protobuf.Internal.EnumLiteMap<ThreatType> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ThreatType>() {
public ThreatType findValueByNumber(int number) {
return ThreatType.forNumber(number);
}
};
// Reflective descriptor access; invalid for UNRECOGNIZED, which has no descriptor.
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.modelarmor.v1.VirusDetail.getDescriptor().getEnumTypes().get(0);
}
// Cached values() array indexed by declaration order for descriptor lookups.
private static final ThreatType[] VALUES = values();
public static ThreatType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
// The proto wire number backing this constant.
private final int value;
private ThreatType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.modelarmor.v1.VirusDetail.ThreatType)
}
public static final int VENDOR_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; getVendor() caches the decoded
// UTF-8 String back into this field (benign race, hence volatile).
@SuppressWarnings("serial")
private volatile java.lang.Object vendor_ = "";
/**
 *
 *
 * <pre>
 * Name of vendor that produced this virus identification.
 * </pre>
 *
 * <code>string vendor = 1;</code>
 *
 * @return The vendor.
 */
@java.lang.Override
public java.lang.String getVendor() {
java.lang.Object ref = vendor_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the ByteString and cache the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
vendor_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Name of vendor that produced this virus identification.
 * </pre>
 *
 * <code>string vendor = 1;</code>
 *
 * @return The bytes for vendor.
 */
@java.lang.Override
public com.google.protobuf.ByteString getVendorBytes() {
java.lang.Object ref = vendor_;
if (ref instanceof java.lang.String) {
// First bytes access: encode the cached String and cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
vendor_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int NAMES_FIELD_NUMBER = 2;
// Backing list for the repeated string field; immutable on a built message.
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList names_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @return A list containing the names.
 */
public com.google.protobuf.ProtocolStringList getNamesList() {
// Exposes the backing list directly; it is not modified after build.
return names_;
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @return The count of names.
 */
public int getNamesCount() {
return names_.size();
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param index The index of the element to return.
 * @return The names at the given index.
 */
public java.lang.String getNames(int index) {
return names_.get(index);
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the names at the given index.
 */
public com.google.protobuf.ByteString getNamesBytes(int index) {
return names_.getByteString(index);
}
public static final int THREAT_TYPE_FIELD_NUMBER = 3;
// Stored as the raw wire number so unrecognized enum values round-trip.
private int threatType_ = 0;
/**
 *
 *
 * <pre>
 * Threat type of the identified virus
 * </pre>
 *
 * <code>.google.cloud.modelarmor.v1.VirusDetail.ThreatType threat_type = 3;</code>
 *
 * @return The enum numeric value on the wire for threatType.
 */
@java.lang.Override
public int getThreatTypeValue() {
return threatType_;
}
/**
 *
 *
 * <pre>
 * Threat type of the identified virus
 * </pre>
 *
 * <code>.google.cloud.modelarmor.v1.VirusDetail.ThreatType threat_type = 3;</code>
 *
 * @return The threatType, or UNRECOGNIZED if the wire number is unknown.
 */
@java.lang.Override
public com.google.cloud.modelarmor.v1.VirusDetail.ThreatType getThreatType() {
com.google.cloud.modelarmor.v1.VirusDetail.ThreatType result =
com.google.cloud.modelarmor.v1.VirusDetail.ThreatType.forNumber(threatType_);
// forNumber returns null for wire numbers this code version does not know.
return result == null
? com.google.cloud.modelarmor.v1.VirusDetail.ThreatType.UNRECOGNIZED
: result;
}
// Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
// Always true once computed (this message declares no required fields);
// the result is memoized in memoizedIsInitialized.
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes the message; proto3 semantics skip fields at their default
// value (empty vendor, unspecified threat type).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(vendor_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, vendor_);
}
for (int i = 0; i < names_.size(); i++) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, names_.getRaw(i));
}
if (threatType_
!= com.google.cloud.modelarmor.v1.VirusDetail.ThreatType.THREAT_TYPE_UNSPECIFIED
.getNumber()) {
output.writeEnum(3, threatType_);
}
getUnknownFields().writeTo(output);
}
// Computes and memoizes the serialized byte size; mirrors writeTo's
// default-skipping logic field by field.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(vendor_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, vendor_);
}
{
int dataSize = 0;
for (int i = 0; i < names_.size(); i++) {
dataSize += computeStringSizeNoTag(names_.getRaw(i));
}
size += dataSize;
// One tag byte per repeated-string entry (field number 2 fits in one byte).
size += 1 * getNamesList().size();
}
if (threatType_
!= com.google.cloud.modelarmor.v1.VirusDetail.ThreatType.THREAT_TYPE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, threatType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Field-by-field structural equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.modelarmor.v1.VirusDetail)) {
return super.equals(obj);
}
com.google.cloud.modelarmor.v1.VirusDetail other =
(com.google.cloud.modelarmor.v1.VirusDetail) obj;
if (!getVendor().equals(other.getVendor())) return false;
if (!getNamesList().equals(other.getNamesList())) return false;
if (threatType_ != other.threatType_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals(); mixes in the descriptor, each
// set field (names only when non-empty), and the unknown fields.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + VENDOR_FIELD_NUMBER;
hash = (53 * hash) + getVendor().hashCode();
if (getNamesCount() > 0) {
hash = (37 * hash) + NAMES_FIELD_NUMBER;
hash = (53 * hash) + getNamesList().hashCode();
}
hash = (37 * hash) + THREAT_TYPE_FIELD_NUMBER;
hash = (53 * hash) + threatType_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: every overload delegates to the
// shared PARSER (byte sources) or the GeneratedMessageV3 stream helpers
// (InputStream/CodedInputStream sources).
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.modelarmor.v1.VirusDetail parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.modelarmor.v1.VirusDetail parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Creates an empty builder for this message type.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Creates a builder seeded from the (all-defaults) default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(com.google.cloud.modelarmor.v1.VirusDetail prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Empty builder for the default instance; otherwise a builder merged
// from this message's state.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
// Builder factory used by the runtime when this message is nested in a
// parent builder.
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Details of an identified virus
 * </pre>
 *
 * Mutable builder for {@code google.cloud.modelarmor.v1.VirusDetail}.
 * Field presence is tracked in {@code bitField0_} so buildPartial only
 * copies fields that were explicitly set.
 *
 * Protobuf type {@code google.cloud.modelarmor.v1.VirusDetail}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.modelarmor.v1.VirusDetail)
com.google.cloud.modelarmor.v1.VirusDetailOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.modelarmor.v1.V1mainProto
.internal_static_google_cloud_modelarmor_v1_VirusDetail_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.modelarmor.v1.V1mainProto
.internal_static_google_cloud_modelarmor_v1_VirusDetail_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.modelarmor.v1.VirusDetail.class,
com.google.cloud.modelarmor.v1.VirusDetail.Builder.class);
}
// Construct using com.google.cloud.modelarmor.v1.VirusDetail.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields to their proto3 defaults and clears presence bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
vendor_ = "";
names_ = com.google.protobuf.LazyStringArrayList.emptyList();
threatType_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.modelarmor.v1.V1mainProto
.internal_static_google_cloud_modelarmor_v1_VirusDetail_descriptor;
}
@java.lang.Override
public com.google.cloud.modelarmor.v1.VirusDetail getDefaultInstanceForType() {
return com.google.cloud.modelarmor.v1.VirusDetail.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.modelarmor.v1.VirusDetail build() {
com.google.cloud.modelarmor.v1.VirusDetail result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.modelarmor.v1.VirusDetail buildPartial() {
com.google.cloud.modelarmor.v1.VirusDetail result =
new com.google.cloud.modelarmor.v1.VirusDetail(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose presence bits are set into the new message;
// the names list is frozen before it is shared with the message.
private void buildPartial0(com.google.cloud.modelarmor.v1.VirusDetail result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.vendor_ = vendor_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
names_.makeImmutable();
result.names_ = names_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.threatType_ = threatType_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.modelarmor.v1.VirusDetail) {
return mergeFrom((com.google.cloud.modelarmor.v1.VirusDetail) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge: non-default scalar fields overwrite, the repeated
// names field is appended (or shared when this builder's list is empty).
public Builder mergeFrom(com.google.cloud.modelarmor.v1.VirusDetail other) {
if (other == com.google.cloud.modelarmor.v1.VirusDetail.getDefaultInstance()) return this;
if (!other.getVendor().isEmpty()) {
vendor_ = other.vendor_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.names_.isEmpty()) {
if (names_.isEmpty()) {
names_ = other.names_;
bitField0_ |= 0x00000002;
} else {
ensureNamesIsMutable();
names_.addAll(other.names_);
}
onChanged();
}
if (other.threatType_ != 0) {
setThreatTypeValue(other.getThreatTypeValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: dispatches on each tag (10 = vendor, 18 = names entry,
// 24 = threat_type) until end of input or an end-group tag.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
vendor_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
ensureNamesIsMutable();
names_.add(s);
break;
} // case 18
case 24:
{
threatType_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Presence bits: 0x1 vendor, 0x2 names, 0x4 threatType.
private int bitField0_;
private java.lang.Object vendor_ = "";
/**
 *
 *
 * <pre>
 * Name of vendor that produced this virus identification.
 * </pre>
 *
 * <code>string vendor = 1;</code>
 *
 * @return The vendor.
 */
public java.lang.String getVendor() {
java.lang.Object ref = vendor_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
vendor_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Name of vendor that produced this virus identification.
 * </pre>
 *
 * <code>string vendor = 1;</code>
 *
 * @return The bytes for vendor.
 */
public com.google.protobuf.ByteString getVendorBytes() {
java.lang.Object ref = vendor_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
vendor_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Name of vendor that produced this virus identification.
 * </pre>
 *
 * <code>string vendor = 1;</code>
 *
 * @param value The vendor to set.
 * @return This builder for chaining.
 */
public Builder setVendor(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
vendor_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Name of vendor that produced this virus identification.
 * </pre>
 *
 * <code>string vendor = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearVendor() {
vendor_ = getDefaultInstance().getVendor();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Name of vendor that produced this virus identification.
 * </pre>
 *
 * <code>string vendor = 1;</code>
 *
 * @param value The bytes for vendor to set.
 * @return This builder for chaining.
 */
public Builder setVendorBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
vendor_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList names_ =
com.google.protobuf.LazyStringArrayList.emptyList();
// Copy-on-write guard: replaces a shared/immutable names list with a
// mutable copy before any in-place mutation.
private void ensureNamesIsMutable() {
if (!names_.isModifiable()) {
names_ = new com.google.protobuf.LazyStringArrayList(names_);
}
bitField0_ |= 0x00000002;
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @return A list containing the names.
 */
public com.google.protobuf.ProtocolStringList getNamesList() {
names_.makeImmutable();
return names_;
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @return The count of names.
 */
public int getNamesCount() {
return names_.size();
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param index The index of the element to return.
 * @return The names at the given index.
 */
public java.lang.String getNames(int index) {
return names_.get(index);
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param index The index of the value to return.
 * @return The bytes of the names at the given index.
 */
public com.google.protobuf.ByteString getNamesBytes(int index) {
return names_.getByteString(index);
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param index The index to set the value at.
 * @param value The names to set.
 * @return This builder for chaining.
 */
public Builder setNames(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureNamesIsMutable();
names_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param value The names to add.
 * @return This builder for chaining.
 */
public Builder addNames(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureNamesIsMutable();
names_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param values The names to add.
 * @return This builder for chaining.
 */
public Builder addAllNames(java.lang.Iterable<java.lang.String> values) {
ensureNamesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, names_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNames() {
names_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Names of this Virus.
 * </pre>
 *
 * <code>repeated string names = 2;</code>
 *
 * @param value The bytes of the names to add.
 * @return This builder for chaining.
 */
public Builder addNamesBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureNamesIsMutable();
names_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int threatType_ = 0;
/**
 *
 *
 * <pre>
 * Threat type of the identified virus
 * </pre>
 *
 * <code>.google.cloud.modelarmor.v1.VirusDetail.ThreatType threat_type = 3;</code>
 *
 * @return The enum numeric value on the wire for threatType.
 */
@java.lang.Override
public int getThreatTypeValue() {
return threatType_;
}
/**
 *
 *
 * <pre>
 * Threat type of the identified virus
 * </pre>
 *
 * <code>.google.cloud.modelarmor.v1.VirusDetail.ThreatType threat_type = 3;</code>
 *
 * @param value The enum numeric value on the wire for threatType to set.
 * @return This builder for chaining.
 */
public Builder setThreatTypeValue(int value) {
threatType_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Threat type of the identified virus
 * </pre>
 *
 * <code>.google.cloud.modelarmor.v1.VirusDetail.ThreatType threat_type = 3;</code>
 *
 * @return The threatType.
 */
@java.lang.Override
public com.google.cloud.modelarmor.v1.VirusDetail.ThreatType getThreatType() {
com.google.cloud.modelarmor.v1.VirusDetail.ThreatType result =
com.google.cloud.modelarmor.v1.VirusDetail.ThreatType.forNumber(threatType_);
return result == null
? com.google.cloud.modelarmor.v1.VirusDetail.ThreatType.UNRECOGNIZED
: result;
}
/**
 *
 *
 * <pre>
 * Threat type of the identified virus
 * </pre>
 *
 * <code>.google.cloud.modelarmor.v1.VirusDetail.ThreatType threat_type = 3;</code>
 *
 * @param value The threatType to set.
 * @return This builder for chaining.
 */
public Builder setThreatType(com.google.cloud.modelarmor.v1.VirusDetail.ThreatType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
threatType_ = value.getNumber();
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Threat type of the identified virus
 * </pre>
 *
 * <code>.google.cloud.modelarmor.v1.VirusDetail.ThreatType threat_type = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearThreatType() {
bitField0_ = (bitField0_ & ~0x00000004);
threatType_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.modelarmor.v1.VirusDetail)
}
// @@protoc_insertion_point(class_scope:google.cloud.modelarmor.v1.VirusDetail)
// Singleton default instance; toBuilder()/mergeFrom compare against it for
// proto3 "unset" semantics.
private static final com.google.cloud.modelarmor.v1.VirusDetail DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.modelarmor.v1.VirusDetail();
}
public static com.google.cloud.modelarmor.v1.VirusDetail getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser: parses via a fresh builder, attaching the partially built
// message to any parse failure for diagnostics.
private static final com.google.protobuf.Parser<VirusDetail> PARSER =
new com.google.protobuf.AbstractParser<VirusDetail>() {
@java.lang.Override
public VirusDetail parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<VirusDetail> parser() {
return PARSER;
}
// Instance accessor for the shared parser, required by MessageLite.
@java.lang.Override
public com.google.protobuf.Parser<VirusDetail> getParserForType() {
return PARSER;
}
// Instance accessor for the type's default instance.
@java.lang.Override
public com.google.cloud.modelarmor.v1.VirusDetail getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/dolphinscheduler | 37,086 | dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/SchedulerServiceImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.PROJECT;
import static org.apache.dolphinscheduler.api.constants.ApiFuncIdentificationConstant.WORKFLOW_ONLINE_OFFLINE;
import org.apache.dolphinscheduler.api.dto.ScheduleParam;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleCreateRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleFilterRequest;
import org.apache.dolphinscheduler.api.dto.schedule.ScheduleUpdateRequest;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.exceptions.ServiceException;
import org.apache.dolphinscheduler.api.service.ExecutorService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.SchedulerService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.api.vo.ScheduleVO;
import org.apache.dolphinscheduler.common.constants.Constants;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.ReleaseState;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.Environment;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkflowDefinition;
import org.apache.dolphinscheduler.dao.mapper.EnvironmentMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.WorkflowDefinitionMapper;
import org.apache.dolphinscheduler.scheduler.api.SchedulerApi;
import org.apache.dolphinscheduler.service.cron.CronUtils;
import org.apache.dolphinscheduler.service.exceptions.CronParseException;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.lang.reflect.InvocationTargetException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import java.util.stream.Collectors;
import lombok.NonNull;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.cronutils.model.Cron;
@Service
@Slf4j
public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService {
// Collaborating services injected by Spring: permission checks and
// workflow-definition validation.
@Autowired
private ProjectService projectService;
@Autowired
private ExecutorService executorService;
// MyBatis mappers for schedule/project/workflow-definition persistence.
@Autowired
private ScheduleMapper scheduleMapper;
@Autowired
private ProjectMapper projectMapper;
@Autowired
private WorkflowDefinitionMapper workflowDefinitionMapper;
// Abstraction over the underlying scheduler (e.g. cron trigger management).
@Autowired
private SchedulerApi schedulerApi;
// Lookups used when validating environment and tenant codes on schedules.
@Autowired
private EnvironmentMapper environmentMapper;
@Autowired
private TenantMapper tenantMapper;
/**
* save schedule
*
* @param loginUser login user
* @param projectCode project name
* @param workflowDefinitionCode workflow definition code
* @param schedule scheduler
* @param warningType warning type
* @param warningGroupId warning group id
* @param failureStrategy failure strategy
* @param workflowInstancePriority workflow instance priority
* @param workerGroup worker group
* @param tenantCode tenant code
* @param environmentCode environment code
* @return create result code
*/
@Override
@Transactional
public Map<String, Object> insertSchedule(User loginUser,
long projectCode,
long workflowDefinitionCode,
String schedule,
WarningType warningType,
int warningGroupId,
FailureStrategy failureStrategy,
Priority workflowInstancePriority,
String workerGroup,
String tenantCode,
Long environmentCode) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByCode(projectCode);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result, null);
if (!hasProjectAndPerm) {
return result;
}
// check workflow define release state
WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinitionCode);
executorService.checkWorkflowDefinitionValid(projectCode, workflowDefinition, workflowDefinitionCode,
workflowDefinition.getVersion());
Schedule scheduleExists =
scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
if (scheduleExists != null) {
log.error("Schedule already exist, scheduleId:{}, workflowDefinitionCode:{}", scheduleExists.getId(),
workflowDefinitionCode);
putMsg(result, Status.SCHEDULE_ALREADY_EXISTS, workflowDefinitionCode, scheduleExists.getId());
return result;
}
Schedule scheduleObj = new Schedule();
Date now = new Date();
checkValidTenant(tenantCode);
scheduleObj.setTenantCode(tenantCode);
scheduleObj.setProjectName(project.getName());
scheduleObj.setWorkflowDefinitionCode(workflowDefinitionCode);
scheduleObj.setWorkflowDefinitionName(workflowDefinition.getName());
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
log.warn("The start time must not be the same as the end or time can not be null.");
putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
return result;
}
if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) {
log.warn("The start time must smaller than end time");
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR);
return result;
}
scheduleObj.setStartTime(scheduleParam.getStartTime());
scheduleObj.setEndTime(scheduleParam.getEndTime());
if (!CronUtils.isValidExpression(scheduleParam.getCrontab())) {
log.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab());
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab());
return result;
}
scheduleObj.setCrontab(scheduleParam.getCrontab());
scheduleObj.setTimezoneId(scheduleParam.getTimezoneId());
scheduleObj.setWarningType(warningType);
scheduleObj.setWarningGroupId(warningGroupId);
scheduleObj.setFailureStrategy(failureStrategy);
scheduleObj.setCreateTime(now);
scheduleObj.setUpdateTime(now);
scheduleObj.setUserId(loginUser.getId());
scheduleObj.setUserName(loginUser.getUserName());
scheduleObj.setReleaseState(ReleaseState.OFFLINE);
scheduleObj.setWorkflowInstancePriority(workflowInstancePriority);
scheduleObj.setWorkerGroup(workerGroup);
scheduleObj.setEnvironmentCode(environmentCode);
scheduleMapper.insert(scheduleObj);
/**
* updateWorkflowInstance receivers and cc by workflow definition id
*/
workflowDefinition.setWarningGroupId(warningGroupId);
workflowDefinitionMapper.updateById(workflowDefinition);
// return scheduler object with ID
result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId()));
putMsg(result, Status.SUCCESS);
log.info("Schedule create complete, projectCode:{}, workflowDefinitionCode:{}, scheduleId:{}.",
projectCode, workflowDefinitionCode, scheduleObj.getId());
result.put("scheduleId", scheduleObj.getId());
return result;
}
protected void projectPermCheckByWorkflowCode(User loginUser, long workflowDefinitionCode) {
WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinitionCode);
if (workflowDefinition == null) {
throw new ServiceException(Status.WORKFLOW_DEFINITION_NOT_EXIST, workflowDefinitionCode);
}
Project project = projectMapper.queryByCode(workflowDefinition.getProjectCode());
// check project auth
this.projectService.checkProjectAndAuthThrowException(loginUser, project, null);
}
private void scheduleParamCheck(String scheduleParamStr) {
ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleParamStr, ScheduleParam.class);
if (scheduleParam == null) {
throw new ServiceException(Status.PARSE_SCHEDULE_PARAM_ERROR, scheduleParamStr);
}
if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
throw new ServiceException(Status.SCHEDULE_START_TIME_END_TIME_SAME);
}
if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) {
throw new ServiceException(Status.START_TIME_BIGGER_THAN_END_TIME_ERROR);
}
if (!CronUtils.isValidExpression(scheduleParam.getCrontab())) {
throw new ServiceException(Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
}
}
/**
* save schedule V2, will also change workflow definition's warningGroupId if schedule's warningGroupId be set
*
* @param loginUser login user
* @param scheduleCreateRequest schedule create object
* @return Schedule object just be created
*/
@Override
@Transactional
public Schedule createSchedulesV2(User loginUser,
ScheduleCreateRequest scheduleCreateRequest) {
this.projectPermCheckByWorkflowCode(loginUser, scheduleCreateRequest.getWorkflowDefinitionCode());
WorkflowDefinition workflowDefinition =
workflowDefinitionMapper.queryByCode(scheduleCreateRequest.getWorkflowDefinitionCode());
// check workflow define release state
executorService.checkWorkflowDefinitionValid(workflowDefinition.getProjectCode(), workflowDefinition,
workflowDefinition.getCode(), workflowDefinition.getVersion());
Schedule scheduleExists =
scheduleMapper.queryByWorkflowDefinitionCode(scheduleCreateRequest.getWorkflowDefinitionCode());
if (scheduleExists != null) {
throw new ServiceException(Status.SCHEDULE_ALREADY_EXISTS,
scheduleCreateRequest.getWorkflowDefinitionCode(),
scheduleExists.getId());
}
checkValidTenant(scheduleCreateRequest.getTenantCode());
Schedule schedule = scheduleCreateRequest.convert2Schedule();
Environment environment = environmentMapper.queryByEnvironmentCode(schedule.getEnvironmentCode());
if (environment == null) {
throw new ServiceException(Status.QUERY_ENVIRONMENT_BY_CODE_ERROR, schedule.getEnvironmentCode());
}
schedule.setUserId(loginUser.getId());
// give more detail when return schedule object
schedule.setUserName(loginUser.getUserName());
schedule.setWorkflowDefinitionName(workflowDefinition.getName());
this.scheduleParamCheck(scheduleCreateRequest.getScheduleParam());
int create = scheduleMapper.insert(schedule);
if (create <= 0) {
throw new ServiceException(Status.CREATE_SCHEDULE_ERROR);
}
// updateWorkflowInstance receivers and cc by workflow definition id
workflowDefinition.setWarningGroupId(schedule.getWarningGroupId());
workflowDefinitionMapper.updateById(workflowDefinition);
return schedule;
}
    /**
     * Update an existing schedule (V1 map-result API).
     *
     * <p>Permission, schedule existence and workflow-definition/project consistency are checked
     * here; the field-level updates are delegated to the private
     * {@code updateSchedule(Map, Schedule, ...)} helper.
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param id scheduler id
     * @param scheduleExpression JSON of ScheduleParam; may be empty to keep the current crontab/times
     * @param warningType warning type (current value kept when null)
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy (current value kept when null)
     * @param workerGroup worker group
     * @param tenantCode tenant code
     * @param environmentCode environment code
     * @param workflowInstancePriority workflow instance priority
     * @return update result code; on success the updated schedule is stored under DATA_LIST
     */
    @Override
    @Transactional
    public Map<String, Object> updateSchedule(User loginUser,
                                              long projectCode,
                                              Integer id,
                                              String scheduleExpression,
                                              WarningType warningType,
                                              int warningGroupId,
                                              FailureStrategy failureStrategy,
                                              Priority workflowInstancePriority,
                                              String workerGroup,
                                              String tenantCode,
                                              Long environmentCode) {
        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByCode(projectCode);
        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result, null);
        if (!hasProjectAndPerm) {
            return result;
        }

        // check schedule exists
        Schedule schedule = scheduleMapper.selectById(id);
        if (schedule == null) {
            log.error("Schedule does not exist, scheduleId:{}.", id);
            putMsg(result, Status.SCHEDULE_NOT_EXISTS, id);
            return result;
        }

        // the schedule's definition must exist and belong to the given project
        WorkflowDefinition workflowDefinition =
                workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode());
        if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) {
            log.error("workflow definition does not exist, workflowDefinitionCode:{}.",
                    schedule.getWorkflowDefinitionCode());
            putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, String.valueOf(schedule.getWorkflowDefinitionCode()));
            return result;
        }

        updateSchedule(result, schedule, workflowDefinition, scheduleExpression, warningType, warningGroupId,
                failureStrategy, workflowInstancePriority, workerGroup, tenantCode, environmentCode);
        return result;
    }
    /**
     * Update a schedule (V2 API): merge the request's fields into the stored schedule,
     * validate, and persist.
     *
     * @param loginUser login user
     * @param scheduleId scheduler id
     * @param scheduleUpdateRequest the fields to merge into the existing schedule
     * @return the updated Schedule object
     * @throws ServiceException when the schedule does not exist, the request is invalid,
     *         permission is missing, the environment cannot be found, or the update fails
     */
    @Override
    @Transactional
    public Schedule updateSchedulesV2(User loginUser,
                                      Integer scheduleId,
                                      ScheduleUpdateRequest scheduleUpdateRequest) {
        Schedule schedule = scheduleMapper.selectById(scheduleId);
        if (schedule == null) {
            throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId);
        }

        Schedule scheduleUpdate;
        try {
            // merge is reflection-based, hence the checked reflection exceptions below
            scheduleUpdate = scheduleUpdateRequest.mergeIntoSchedule(schedule);
            // check update params
            this.scheduleParamCheck(scheduleUpdateRequest.updateScheduleParam(scheduleUpdate));
        } catch (InvocationTargetException | IllegalAccessException | InstantiationException
                | NoSuchMethodException e) {
            // NOTE(review): the reflection cause 'e' is dropped here — consider chaining it
            throw new ServiceException(Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleUpdateRequest.toString());
        }
        // permission check against the project owning the (merged) workflow definition
        this.projectPermCheckByWorkflowCode(loginUser, scheduleUpdate.getWorkflowDefinitionCode());

        // environment is only validated when one is set on the merged schedule
        if (scheduleUpdate.getEnvironmentCode() != null) {
            Environment environment = environmentMapper.queryByEnvironmentCode(scheduleUpdate.getEnvironmentCode());
            if (environment == null) {
                throw new ServiceException(Status.QUERY_ENVIRONMENT_BY_CODE_ERROR, scheduleUpdate.getEnvironmentCode());
            }
        }

        int update = scheduleMapper.updateById(scheduleUpdate);
        if (update <= 0) {
            throw new ServiceException(Status.UPDATE_SCHEDULE_ERROR);
        }
        return scheduleUpdate;
    }
/**
* get schedule object
*
* @param loginUser login user
* @param scheduleId scheduler id
* @return Schedule object
*/
@Override
@Transactional
public Schedule getSchedule(User loginUser,
Integer scheduleId) {
Schedule schedule = scheduleMapper.selectById(scheduleId);
if (schedule == null) {
throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId);
}
this.projectPermCheckByWorkflowCode(loginUser, schedule.getWorkflowDefinitionCode());
return schedule;
}
    /**
     * Page through the schedules of a project, optionally restricted to one workflow definition.
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param workflowDefinitionCode workflow definition code; 0 lists schedules of every definition in the project
     * @param pageNo page number
     * @param pageSize page size
     * @param searchVal search value
     * @return schedule list page (ScheduleVO items)
     */
    @Override
    public Result querySchedule(User loginUser, long projectCode, long workflowDefinitionCode, String searchVal,
                                Integer pageNo, Integer pageSize) {
        Result result = new Result();

        Project project = projectMapper.queryByCode(projectCode);
        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result, PROJECT);
        if (!hasProjectAndPerm) {
            return result;
        }

        // code 0 is the "no filter" sentinel; otherwise the definition must exist in this project
        if (workflowDefinitionCode != 0) {
            WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinitionCode);
            if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) {
                log.error("workflow definition does not exist, workflowDefinitionCode:{}.", workflowDefinitionCode);
                putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, String.valueOf(workflowDefinitionCode));
                return result;
            }
        }
        Page<Schedule> page = new Page<>(pageNo, pageSize);
        IPage<Schedule> schedulePage =
                scheduleMapper.queryByProjectAndWorkflowDefinitionCodePaging(page, projectCode, workflowDefinitionCode,
                        searchVal);

        // wrap entities into view objects for the API response
        List<ScheduleVO> scheduleList = new ArrayList<>();
        for (Schedule schedule : schedulePage.getRecords()) {
            scheduleList.add(new ScheduleVO(schedule));
        }

        PageInfo<ScheduleVO> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotal((int) schedulePage.getTotal());
        pageInfo.setTotalList(scheduleList);
        result.setData(pageInfo);
        putMsg(result, Status.SUCCESS);
        return result;
    }
public List<Schedule> queryScheduleByWorkflowDefinitionCodes(@NonNull List<Long> workflowDefinitionCodes) {
if (CollectionUtils.isEmpty(workflowDefinitionCodes)) {
return Collections.emptyList();
}
return scheduleMapper.querySchedulesByWorkflowDefinitionCodes(workflowDefinitionCodes);
}
/**
* query schedule
*
* @param loginUser login user
* @param scheduleFilterRequest schedule filter request
* @return schedule list page
*/
@Override
@Transactional
public PageInfo<Schedule> filterSchedules(User loginUser,
ScheduleFilterRequest scheduleFilterRequest) {
if (scheduleFilterRequest.getProjectName() != null) {
Project project = projectMapper.queryByName(scheduleFilterRequest.getProjectName());
// check project auth
projectService.checkProjectAndAuthThrowException(loginUser, project, null);
}
Page<Schedule> page = new Page<>(scheduleFilterRequest.getPageNo(), scheduleFilterRequest.getPageSize());
IPage<Schedule> scheduleIPage = scheduleMapper.filterSchedules(page, scheduleFilterRequest.convert2Schedule());
PageInfo<Schedule> pageInfo =
new PageInfo<>(scheduleFilterRequest.getPageNo(), scheduleFilterRequest.getPageSize());
pageInfo.setTotal((int) scheduleIPage.getTotal());
pageInfo.setTotalList(scheduleIPage.getRecords());
return pageInfo;
}
/**
* query schedule list
*
* @param loginUser login user
* @param projectCode project code
* @return schedule list
*/
@Override
public Map<String, Object> queryScheduleList(User loginUser, long projectCode) {
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByCode(projectCode);
// check project auth
boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result, null);
if (!hasProjectAndPerm) {
return result;
}
List<Schedule> schedules = scheduleMapper.querySchedulerListByProjectName(project.getName());
List<ScheduleVO> scheduleList = new ArrayList<>();
for (Schedule schedule : schedules) {
scheduleList.add(new ScheduleVO(schedule));
}
result.put(Constants.DATA_LIST, scheduleList);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* check valid
*
* @param result result
* @param bool bool
* @param status status
* @return check result code
*/
private boolean checkValid(Map<String, Object> result, boolean bool, Status status) {
// timeout is valid
if (bool) {
putMsg(result, status);
return true;
}
return false;
}
/**
* delete schedule by id
*
* @param loginUser login user
* @param scheduleId schedule id
*/
@Override
public void deleteSchedulesById(User loginUser, Integer scheduleId) {
Schedule schedule = scheduleMapper.selectById(scheduleId);
if (schedule == null) {
throw new ServiceException(Status.SCHEDULE_NOT_EXISTS, scheduleId);
}
// check schedule is already online
if (schedule.getReleaseState() == ReleaseState.ONLINE) {
throw new ServiceException(Status.SCHEDULE_STATE_ONLINE, scheduleId);
}
// Determine if the login user is the owner of the schedule
if (loginUser.getId() != schedule.getUserId() && loginUser.getUserType() != UserType.ADMIN_USER) {
throw new ServiceException(Status.USER_NO_OPERATION_PERM);
}
this.projectPermCheckByWorkflowCode(loginUser, schedule.getWorkflowDefinitionCode());
int delete = scheduleMapper.deleteById(scheduleId);
if (delete <= 0) {
throw new ServiceException(Status.DELETE_SCHEDULE_BY_ID_ERROR);
}
}
/**
* preview schedule
*
* @param loginUser login user
* @param schedule schedule expression
* @return the next five fire time
*/
@Override
public Map<String, Object> previewSchedule(User loginUser, String schedule) {
Map<String, Object> result = new HashMap<>();
Cron cron;
ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
assert scheduleParam != null;
ZoneId zoneId = TimeZone.getTimeZone(scheduleParam.getTimezoneId()).toZoneId();
ZonedDateTime now = ZonedDateTime.now(zoneId);
ZonedDateTime startTime = ZonedDateTime.ofInstant(scheduleParam.getStartTime().toInstant(), zoneId);
ZonedDateTime endTime = ZonedDateTime.ofInstant(scheduleParam.getEndTime().toInstant(), zoneId);
startTime = now.isAfter(startTime) ? now : startTime;
try {
cron = CronUtils.parse2Cron(scheduleParam.getCrontab());
} catch (CronParseException e) {
log.error("Parse cron to cron expression error, crontab:{}.", scheduleParam.getCrontab(), e);
putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
return result;
}
List<ZonedDateTime> selfFireDateList =
CronUtils.getSelfFireDateList(startTime, endTime, cron, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT);
List<String> previewDateList =
selfFireDateList.stream().map(t -> DateUtils.dateToString(t, zoneId)).collect(Collectors.toList());
result.put(Constants.DATA_LIST, previewDateList);
putMsg(result, Status.SUCCESS);
return result;
}
    /**
     * Update the schedule attached to a workflow definition, looked up by definition code
     * rather than by schedule id.
     *
     * @param loginUser login user
     * @param projectCode project code
     * @param workflowDefinitionCode workflow definition code
     * @param scheduleExpression JSON of ScheduleParam; may be empty to keep the current crontab/times
     * @param warningType warning type (current value kept when null)
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy (current value kept when null)
     * @param workerGroup worker group
     * @param tenantCode tenant code
     * @param environmentCode environment code
     * @param workflowInstancePriority workflow instance priority
     * @return update result code
     */
    @Override
    public Map<String, Object> updateScheduleByWorkflowDefinitionCode(User loginUser,
                                                                      long projectCode,
                                                                      long workflowDefinitionCode,
                                                                      String scheduleExpression,
                                                                      WarningType warningType,
                                                                      int warningGroupId,
                                                                      FailureStrategy failureStrategy,
                                                                      Priority workflowInstancePriority,
                                                                      String workerGroup,
                                                                      String tenantCode,
                                                                      long environmentCode) {
        Project project = projectMapper.queryByCode(projectCode);
        // check user access for project
        Map<String, Object> result = projectService.checkProjectAndAuth(loginUser, project, projectCode, null);
        if (result.get(Constants.STATUS) != Status.SUCCESS) {
            return result;
        }

        // check schedule exists
        Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
        if (schedule == null) {
            log.error("Schedule of workflow definition does not exist, workflowDefinitionCode:{}.",
                    workflowDefinitionCode);
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, workflowDefinitionCode);
            return result;
        }

        // the definition must exist and belong to the given project
        WorkflowDefinition workflowDefinition = workflowDefinitionMapper.queryByCode(workflowDefinitionCode);
        if (workflowDefinition == null || projectCode != workflowDefinition.getProjectCode()) {
            log.error("workflow definition does not exist, workflowDefinitionCode:{}.", workflowDefinitionCode);
            putMsg(result, Status.WORKFLOW_DEFINITION_NOT_EXIST, String.valueOf(workflowDefinitionCode));
            return result;
        }
        // field-level updates are shared with updateSchedule(User, long, Integer, ...)
        updateSchedule(result, schedule, workflowDefinition, scheduleExpression, warningType, warningGroupId,
                failureStrategy, workflowInstancePriority, workerGroup, tenantCode, environmentCode);
        return result;
    }
@Transactional
@Override
public void onlineScheduler(User loginUser, Long projectCode, Integer schedulerId) {
projectService.checkProjectAndAuthThrowException(loginUser, projectCode, WORKFLOW_ONLINE_OFFLINE);
Schedule schedule = scheduleMapper.selectById(schedulerId);
doOnlineScheduler(schedule);
}
@Transactional
@Override
public void onlineSchedulerByWorkflowCode(Long workflowDefinitionCode) {
Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
doOnlineScheduler(schedule);
}
    /**
     * Bring a schedule online: flip its release state in the DB, then register the trigger
     * with the underlying scheduler via {@code schedulerApi}.
     *
     * <p>No-op when the schedule is null or already online. The owning workflow definition
     * must itself be online first.
     */
    private void doOnlineScheduler(Schedule schedule) {
        if (schedule == null) {
            return;
        }
        if (ReleaseState.ONLINE.equals(schedule.getReleaseState())) {
            // idempotent: already online
            log.debug("The schedule is already online, scheduleId:{}.", schedule.getId());
            return;
        }
        WorkflowDefinition workflowDefinition =
                workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode());
        if (!ReleaseState.ONLINE.equals(workflowDefinition.getReleaseState())) {
            throw new ServiceException(Status.WORKFLOW_DEFINITION_NOT_RELEASE, workflowDefinition.getName());
        }
        schedule.setReleaseState(ReleaseState.ONLINE);
        schedule.setUpdateTime(new Date());
        scheduleMapper.updateById(schedule);

        // DB state is persisted before the scheduler task is (re)registered
        Project project = projectMapper.queryByCode(workflowDefinition.getProjectCode());
        schedulerApi.insertOrUpdateScheduleTask(project.getId(), schedule);
    }
@Transactional
@Override
public void offlineScheduler(User loginUser, Long projectCode, Integer schedulerId) {
projectService.checkProjectAndAuthThrowException(loginUser, projectCode, WORKFLOW_ONLINE_OFFLINE);
Schedule schedule = scheduleMapper.selectById(schedulerId);
doOfflineScheduler(schedule);
}
@Transactional
@Override
public void offlineSchedulerByWorkflowCode(Long workflowDefinitionCode) {
Schedule schedule = scheduleMapper.queryByWorkflowDefinitionCode(workflowDefinitionCode);
doOfflineScheduler(schedule);
}
    /**
     * Take a schedule offline: flip its release state in the DB, then remove the trigger
     * from the underlying scheduler via {@code schedulerApi}.
     *
     * <p>No-op when the schedule is null or already offline.
     */
    private void doOfflineScheduler(Schedule schedule) {
        if (schedule == null) {
            return;
        }
        if (ReleaseState.OFFLINE.equals(schedule.getReleaseState())) {
            // idempotent: already offline
            log.debug("The schedule is already offline, scheduleId:{}.", schedule.getId());
            return;
        }
        schedule.setUpdateTime(new Date());
        schedule.setReleaseState(ReleaseState.OFFLINE);
        scheduleMapper.updateById(schedule);

        // DB state is persisted before the scheduler task is removed
        WorkflowDefinition workflowDefinition =
                workflowDefinitionMapper.queryByCode(schedule.getWorkflowDefinitionCode());
        Project project = projectMapper.queryByCode(workflowDefinition.getProjectCode());
        schedulerApi.deleteScheduleTask(project.getId(), schedule.getId());
    }
    /**
     * Shared field-level update used by both V1 update entry points.
     *
     * <p>Refuses to modify an online schedule. When {@code scheduleExpression} is non-empty it
     * is parsed and validated (distinct start/end, start before end, valid crontab) before the
     * timing fields are applied; when empty, the existing timing fields are kept. Null
     * {@code warningType} / {@code failureStrategy} leave the current values untouched. Also
     * propagates {@code warningGroupId} onto the workflow definition. On success the updated
     * schedule is placed under DATA_LIST in {@code result}.
     */
    private void updateSchedule(Map<String, Object> result, Schedule schedule, WorkflowDefinition workflowDefinition,
                                String scheduleExpression, WarningType warningType, int warningGroupId,
                                FailureStrategy failureStrategy, Priority workflowInstancePriority, String workerGroup,
                                String tenantCode,
                                long environmentCode) {
        // online schedules are immutable; they must be taken offline first
        if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE,
                Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
            log.warn("Schedule can not be updated due to schedule is {}, scheduleId:{}.",
                    ReleaseState.ONLINE.getDescp(), schedule.getId());
            return;
        }

        Date now = new Date();

        checkValidTenant(tenantCode);
        schedule.setTenantCode(tenantCode);

        // updateWorkflowInstance param
        if (!StringUtils.isEmpty(scheduleExpression)) {
            ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class);
            if (scheduleParam == null) {
                log.warn("Parameter scheduleExpression is invalid, so parse cron error.");
                putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
                return;
            }
            if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
                log.warn("The start time must not be the same as the end or time can not be null.");
                putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
                return;
            }
            if (scheduleParam.getStartTime().getTime() > scheduleParam.getEndTime().getTime()) {
                log.warn("The start time must smaller than end time");
                putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR);
                return;
            }

            schedule.setStartTime(scheduleParam.getStartTime());
            schedule.setEndTime(scheduleParam.getEndTime());
            if (!CronUtils.isValidExpression(scheduleParam.getCrontab())) {
                log.error("Schedule crontab verify failure, crontab:{}.", scheduleParam.getCrontab());
                putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
                return;
            }
            schedule.setCrontab(scheduleParam.getCrontab());
            schedule.setTimezoneId(scheduleParam.getTimezoneId());
        }

        if (warningType != null) {
            schedule.setWarningType(warningType);
        }

        schedule.setWarningGroupId(warningGroupId);

        if (failureStrategy != null) {
            schedule.setFailureStrategy(failureStrategy);
        }

        schedule.setWorkerGroup(workerGroup);
        schedule.setEnvironmentCode(environmentCode);
        schedule.setUpdateTime(now);
        schedule.setWorkflowInstancePriority(workflowInstancePriority);
        scheduleMapper.updateById(schedule);

        // keep the workflow definition's warning group in sync with the schedule
        workflowDefinition.setWarningGroupId(warningGroupId);
        workflowDefinitionMapper.updateById(workflowDefinition);

        log.info("Schedule update complete, projectCode:{}, workflowDefinitionCode:{}, scheduleId:{}.",
                workflowDefinition.getProjectCode(), workflowDefinition.getCode(), schedule.getId());
        result.put(Constants.DATA_LIST, schedule);
        putMsg(result, Status.SUCCESS);
    }
/**
* check valid tenant
*
* @param tenantCode
*/
private void checkValidTenant(String tenantCode) {
if (!Constants.DEFAULT.equals(tenantCode)) {
Tenant tenant = tenantMapper.queryByTenantCode(tenantCode);
if (tenant == null) {
throw new ServiceException(Status.TENANT_NOT_EXIST, tenantCode);
}
}
}
}
// ---- concatenation boundary: the remainder of this chunk is a separate, machine-generated file ----
// googleapis/google-cloud-java: java-domains/proto-google-cloud-domains-v1/src/main/java/com/google/cloud/domains/v1/ListRegistrationsResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/domains/v1/domains.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.domains.v1;
/**
*
*
* <pre>
* Response for the `ListRegistrations` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1.ListRegistrationsResponse}
*/
public final class ListRegistrationsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.domains.v1.ListRegistrationsResponse)
ListRegistrationsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListRegistrationsResponse.newBuilder() to construct.
private ListRegistrationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListRegistrationsResponse() {
registrations_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListRegistrationsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1.DomainsProto
.internal_static_google_cloud_domains_v1_ListRegistrationsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1.DomainsProto
.internal_static_google_cloud_domains_v1_ListRegistrationsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1.ListRegistrationsResponse.class,
com.google.cloud.domains.v1.ListRegistrationsResponse.Builder.class);
}
public static final int REGISTRATIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.domains.v1.Registration> registrations_;
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.domains.v1.Registration> getRegistrationsList() {
return registrations_;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.domains.v1.RegistrationOrBuilder>
getRegistrationsOrBuilderList() {
return registrations_;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
@java.lang.Override
public int getRegistrationsCount() {
return registrations_.size();
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
@java.lang.Override
public com.google.cloud.domains.v1.Registration getRegistrations(int index) {
return registrations_.get(index);
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
@java.lang.Override
public com.google.cloud.domains.v1.RegistrationOrBuilder getRegistrationsOrBuilder(int index) {
return registrations_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < registrations_.size(); i++) {
output.writeMessage(1, registrations_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < registrations_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, registrations_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.domains.v1.ListRegistrationsResponse)) {
return super.equals(obj);
}
com.google.cloud.domains.v1.ListRegistrationsResponse other =
(com.google.cloud.domains.v1.ListRegistrationsResponse) obj;
if (!getRegistrationsList().equals(other.getRegistrationsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field is mixed in only when non-empty, mirroring equals().
    if (getRegistrationsCount() > 0) {
      hash = (37 * hash) + REGISTRATIONS_FIELD_NUMBER;
      hash = (53 * hash) + getRegistrationsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; the in-memory overloads delegate
  // directly to PARSER.
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream-based overloads route through GeneratedMessageV3 helpers so that
  // I/O errors surface as IOException rather than wrapped protocol errors.
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited overloads expect a length prefix before the message bytes.
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a builder initialized with default field values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.domains.v1.ListRegistrationsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; any other instance is
    // merged into a new builder so edits do not affect this message.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response for the `ListRegistrations` method.
* </pre>
*
* Protobuf type {@code google.cloud.domains.v1.ListRegistrationsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.domains.v1.ListRegistrationsResponse)
com.google.cloud.domains.v1.ListRegistrationsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.domains.v1.DomainsProto
.internal_static_google_cloud_domains_v1_ListRegistrationsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.domains.v1.DomainsProto
.internal_static_google_cloud_domains_v1_ListRegistrationsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.domains.v1.ListRegistrationsResponse.class,
com.google.cloud.domains.v1.ListRegistrationsResponse.Builder.class);
}
// Construct using com.google.cloud.domains.v1.ListRegistrationsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the repeated field: drop the local list, or clear the nested
      // field builder if one has been materialized.
      if (registrationsBuilder_ == null) {
        registrations_ = java.util.Collections.emptyList();
      } else {
        registrations_ = null;
        registrationsBuilder_.clear();
      }
      // Clear the "registrations_ is a private mutable list" bit.
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.domains.v1.DomainsProto
          .internal_static_google_cloud_domains_v1_ListRegistrationsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.domains.v1.ListRegistrationsResponse getDefaultInstanceForType() {
      return com.google.cloud.domains.v1.ListRegistrationsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.domains.v1.ListRegistrationsResponse build() {
      com.google.cloud.domains.v1.ListRegistrationsResponse result = buildPartial();
      // Unlike buildPartial(), build() rejects structurally uninitialized results.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.domains.v1.ListRegistrationsResponse buildPartial() {
      com.google.cloud.domains.v1.ListRegistrationsResponse result =
          new com.google.cloud.domains.v1.ListRegistrationsResponse(this);
      buildPartialRepeatedFields(result);
      // bitField0_ != 0 means at least one scalar field was explicitly set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Moves the repeated `registrations` field into the result, freezing the
    // locally held list (if any) so the built message stays immutable.
    private void buildPartialRepeatedFields(
        com.google.cloud.domains.v1.ListRegistrationsResponse result) {
      if (registrationsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          registrations_ = java.util.Collections.unmodifiableList(registrations_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.registrations_ = registrations_;
      } else {
        result.registrations_ = registrationsBuilder_.build();
      }
    }
    // Copies scalar fields whose "has been set" bit is present in bitField0_.
    private void buildPartial0(com.google.cloud.domains.v1.ListRegistrationsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to reflective merge.
      if (other instanceof com.google.cloud.domains.v1.ListRegistrationsResponse) {
        return mergeFrom((com.google.cloud.domains.v1.ListRegistrationsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.domains.v1.ListRegistrationsResponse other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.domains.v1.ListRegistrationsResponse.getDefaultInstance())
        return this;
      if (registrationsBuilder_ == null) {
        // List-backed mode: if our list is empty, share the other message's
        // (already immutable) list; otherwise append after a copy-on-write.
        if (!other.registrations_.isEmpty()) {
          if (registrations_.isEmpty()) {
            registrations_ = other.registrations_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureRegistrationsIsMutable();
            registrations_.addAll(other.registrations_);
          }
          onChanged();
        }
      } else {
        // Builder-backed mode: if our builder is empty, discard it and adopt
        // the other list wholesale (re-creating the builder only when the
        // runtime mandates field builders); otherwise append the messages.
        if (!other.registrations_.isEmpty()) {
          if (registrationsBuilder_.isEmpty()) {
            registrationsBuilder_.dispose();
            registrationsBuilder_ = null;
            registrations_ = other.registrations_;
            bitField0_ = (bitField0_ & ~0x00000001);
            registrationsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getRegistrationsFieldBuilder()
                    : null;
          } else {
            registrationsBuilder_.addAllMessages(other.registrations_);
          }
        }
      }
      // Proto3 string merge semantics: a non-empty value on the other side wins.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields, so a builder in any state is initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Tag-dispatch loop over the wire stream; tag 0 marks end of input.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // Tag 10 = field 1 (registrations), length-delimited.
            case 10:
              {
                com.google.cloud.domains.v1.Registration m =
                    input.readMessage(
                        com.google.cloud.domains.v1.Registration.parser(), extensionRegistry);
                if (registrationsBuilder_ == null) {
                  ensureRegistrationsIsMutable();
                  registrations_.add(m);
                } else {
                  registrationsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            // Tag 18 = field 2 (next_page_token), length-delimited UTF-8.
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing stops partway through.
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // Backing list for repeated field `registrations`; starts as the shared
    // immutable empty list and is copied on first mutation.
    private java.util.List<com.google.cloud.domains.v1.Registration> registrations_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: bit 0x1 of bitField0_ records that registrations_
    // is already a private, mutable ArrayList owned by this builder.
    private void ensureRegistrationsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        registrations_ =
            new java.util.ArrayList<com.google.cloud.domains.v1.Registration>(registrations_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.domains.v1.Registration,
com.google.cloud.domains.v1.Registration.Builder,
com.google.cloud.domains.v1.RegistrationOrBuilder>
registrationsBuilder_;
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public java.util.List<com.google.cloud.domains.v1.Registration> getRegistrationsList() {
if (registrationsBuilder_ == null) {
return java.util.Collections.unmodifiableList(registrations_);
} else {
return registrationsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public int getRegistrationsCount() {
if (registrationsBuilder_ == null) {
return registrations_.size();
} else {
return registrationsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public com.google.cloud.domains.v1.Registration getRegistrations(int index) {
if (registrationsBuilder_ == null) {
return registrations_.get(index);
} else {
return registrationsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder setRegistrations(int index, com.google.cloud.domains.v1.Registration value) {
if (registrationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegistrationsIsMutable();
registrations_.set(index, value);
onChanged();
} else {
registrationsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder setRegistrations(
int index, com.google.cloud.domains.v1.Registration.Builder builderForValue) {
if (registrationsBuilder_ == null) {
ensureRegistrationsIsMutable();
registrations_.set(index, builderForValue.build());
onChanged();
} else {
registrationsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder addRegistrations(com.google.cloud.domains.v1.Registration value) {
if (registrationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegistrationsIsMutable();
registrations_.add(value);
onChanged();
} else {
registrationsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder addRegistrations(int index, com.google.cloud.domains.v1.Registration value) {
if (registrationsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureRegistrationsIsMutable();
registrations_.add(index, value);
onChanged();
} else {
registrationsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder addRegistrations(
com.google.cloud.domains.v1.Registration.Builder builderForValue) {
if (registrationsBuilder_ == null) {
ensureRegistrationsIsMutable();
registrations_.add(builderForValue.build());
onChanged();
} else {
registrationsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder addRegistrations(
int index, com.google.cloud.domains.v1.Registration.Builder builderForValue) {
if (registrationsBuilder_ == null) {
ensureRegistrationsIsMutable();
registrations_.add(index, builderForValue.build());
onChanged();
} else {
registrationsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder addAllRegistrations(
java.lang.Iterable<? extends com.google.cloud.domains.v1.Registration> values) {
if (registrationsBuilder_ == null) {
ensureRegistrationsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, registrations_);
onChanged();
} else {
registrationsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder clearRegistrations() {
if (registrationsBuilder_ == null) {
registrations_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
registrationsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public Builder removeRegistrations(int index) {
if (registrationsBuilder_ == null) {
ensureRegistrationsIsMutable();
registrations_.remove(index);
onChanged();
} else {
registrationsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public com.google.cloud.domains.v1.Registration.Builder getRegistrationsBuilder(int index) {
return getRegistrationsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public com.google.cloud.domains.v1.RegistrationOrBuilder getRegistrationsOrBuilder(int index) {
if (registrationsBuilder_ == null) {
return registrations_.get(index);
} else {
return registrationsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public java.util.List<? extends com.google.cloud.domains.v1.RegistrationOrBuilder>
getRegistrationsOrBuilderList() {
if (registrationsBuilder_ != null) {
return registrationsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(registrations_);
}
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public com.google.cloud.domains.v1.Registration.Builder addRegistrationsBuilder() {
return getRegistrationsFieldBuilder()
.addBuilder(com.google.cloud.domains.v1.Registration.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public com.google.cloud.domains.v1.Registration.Builder addRegistrationsBuilder(int index) {
return getRegistrationsFieldBuilder()
.addBuilder(index, com.google.cloud.domains.v1.Registration.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of `Registration`s.
* </pre>
*
* <code>repeated .google.cloud.domains.v1.Registration registrations = 1;</code>
*/
public java.util.List<com.google.cloud.domains.v1.Registration.Builder>
getRegistrationsBuilderList() {
return getRegistrationsFieldBuilder().getBuilderList();
}
    // Lazily creates the nested RepeatedFieldBuilderV3, transferring ownership
    // of the current list to it; from then on registrations_ is null and the
    // field builder is the single source of truth for the repeated field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.domains.v1.Registration,
            com.google.cloud.domains.v1.Registration.Builder,
            com.google.cloud.domains.v1.RegistrationOrBuilder>
        getRegistrationsFieldBuilder() {
      if (registrationsBuilder_ == null) {
        registrationsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.domains.v1.Registration,
                com.google.cloud.domains.v1.Registration.Builder,
                com.google.cloud.domains.v1.RegistrationOrBuilder>(
                registrations_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        registrations_ = null;
      }
      return registrationsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* When present, there are more results to retrieve. Set `page_token` to this
* value on a subsequent call to get the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.domains.v1.ListRegistrationsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.domains.v1.ListRegistrationsResponse)
  // Singleton all-defaults instance; also serves as the prototype from which
  // builders are derived (see newBuilder()).
  private static final com.google.cloud.domains.v1.ListRegistrationsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.domains.v1.ListRegistrationsResponse();
  }
  public static com.google.cloud.domains.v1.ListRegistrationsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser: parses by merging into a fresh builder, normalizing every
  // failure to InvalidProtocolBufferException with the partial message attached.
  private static final com.google.protobuf.Parser<ListRegistrationsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListRegistrationsResponse>() {
        @java.lang.Override
        public ListRegistrationsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so callers can inspect partial data.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListRegistrationsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListRegistrationsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.domains.v1.ListRegistrationsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,610 | java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/BusinessGlossaryEvent.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/logs.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
/**
*
*
* <pre>
* Payload associated with Business Glossary related log events.
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.BusinessGlossaryEvent}
*/
public final class BusinessGlossaryEvent extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.BusinessGlossaryEvent)
BusinessGlossaryEventOrBuilder {
private static final long serialVersionUID = 0L;
// Use BusinessGlossaryEvent.newBuilder() to construct.
private BusinessGlossaryEvent(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private BusinessGlossaryEvent() {
message_ = "";
eventType_ = 0;
resource_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new BusinessGlossaryEvent();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataplex.v1.LogsProto
.internal_static_google_cloud_dataplex_v1_BusinessGlossaryEvent_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataplex.v1.LogsProto
.internal_static_google_cloud_dataplex_v1_BusinessGlossaryEvent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataplex.v1.BusinessGlossaryEvent.class,
com.google.cloud.dataplex.v1.BusinessGlossaryEvent.Builder.class);
}
/**
*
*
* <pre>
* Type of glossary log event.
* </pre>
*
* Protobuf enum {@code google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType}
*/
public enum EventType implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* An unspecified event type.
* </pre>
*
* <code>EVENT_TYPE_UNSPECIFIED = 0;</code>
*/
EVENT_TYPE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Glossary create event.
* </pre>
*
* <code>GLOSSARY_CREATE = 1;</code>
*/
GLOSSARY_CREATE(1),
/**
*
*
* <pre>
* Glossary update event.
* </pre>
*
* <code>GLOSSARY_UPDATE = 2;</code>
*/
GLOSSARY_UPDATE(2),
/**
*
*
* <pre>
* Glossary delete event.
* </pre>
*
* <code>GLOSSARY_DELETE = 3;</code>
*/
GLOSSARY_DELETE(3),
/**
*
*
* <pre>
* Glossary category create event.
* </pre>
*
* <code>GLOSSARY_CATEGORY_CREATE = 4;</code>
*/
GLOSSARY_CATEGORY_CREATE(4),
/**
*
*
* <pre>
* Glossary category update event.
* </pre>
*
* <code>GLOSSARY_CATEGORY_UPDATE = 5;</code>
*/
GLOSSARY_CATEGORY_UPDATE(5),
/**
*
*
* <pre>
* Glossary category delete event.
* </pre>
*
* <code>GLOSSARY_CATEGORY_DELETE = 6;</code>
*/
GLOSSARY_CATEGORY_DELETE(6),
/**
*
*
* <pre>
* Glossary term create event.
* </pre>
*
* <code>GLOSSARY_TERM_CREATE = 7;</code>
*/
GLOSSARY_TERM_CREATE(7),
/**
*
*
* <pre>
* Glossary term update event.
* </pre>
*
* <code>GLOSSARY_TERM_UPDATE = 8;</code>
*/
GLOSSARY_TERM_UPDATE(8),
/**
*
*
* <pre>
* Glossary term delete event.
* </pre>
*
* <code>GLOSSARY_TERM_DELETE = 9;</code>
*/
GLOSSARY_TERM_DELETE(9),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* An unspecified event type.
* </pre>
*
* <code>EVENT_TYPE_UNSPECIFIED = 0;</code>
*/
public static final int EVENT_TYPE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Glossary create event.
* </pre>
*
* <code>GLOSSARY_CREATE = 1;</code>
*/
public static final int GLOSSARY_CREATE_VALUE = 1;
/**
*
*
* <pre>
* Glossary update event.
* </pre>
*
* <code>GLOSSARY_UPDATE = 2;</code>
*/
public static final int GLOSSARY_UPDATE_VALUE = 2;
/**
*
*
* <pre>
* Glossary delete event.
* </pre>
*
* <code>GLOSSARY_DELETE = 3;</code>
*/
public static final int GLOSSARY_DELETE_VALUE = 3;
/**
*
*
* <pre>
* Glossary category create event.
* </pre>
*
* <code>GLOSSARY_CATEGORY_CREATE = 4;</code>
*/
public static final int GLOSSARY_CATEGORY_CREATE_VALUE = 4;
/**
*
*
* <pre>
* Glossary category update event.
* </pre>
*
* <code>GLOSSARY_CATEGORY_UPDATE = 5;</code>
*/
public static final int GLOSSARY_CATEGORY_UPDATE_VALUE = 5;
/**
*
*
* <pre>
* Glossary category delete event.
* </pre>
*
* <code>GLOSSARY_CATEGORY_DELETE = 6;</code>
*/
public static final int GLOSSARY_CATEGORY_DELETE_VALUE = 6;
/**
*
*
* <pre>
* Glossary term create event.
* </pre>
*
* <code>GLOSSARY_TERM_CREATE = 7;</code>
*/
public static final int GLOSSARY_TERM_CREATE_VALUE = 7;
/**
*
*
* <pre>
* Glossary term update event.
* </pre>
*
* <code>GLOSSARY_TERM_UPDATE = 8;</code>
*/
public static final int GLOSSARY_TERM_UPDATE_VALUE = 8;
/**
*
*
* <pre>
* Glossary term delete event.
* </pre>
*
* <code>GLOSSARY_TERM_DELETE = 9;</code>
*/
public static final int GLOSSARY_TERM_DELETE_VALUE = 9;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static EventType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static EventType forNumber(int value) {
switch (value) {
case 0:
return EVENT_TYPE_UNSPECIFIED;
case 1:
return GLOSSARY_CREATE;
case 2:
return GLOSSARY_UPDATE;
case 3:
return GLOSSARY_DELETE;
case 4:
return GLOSSARY_CATEGORY_CREATE;
case 5:
return GLOSSARY_CATEGORY_UPDATE;
case 6:
return GLOSSARY_CATEGORY_DELETE;
case 7:
return GLOSSARY_TERM_CREATE;
case 8:
return GLOSSARY_TERM_UPDATE;
case 9:
return GLOSSARY_TERM_DELETE;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<EventType> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<EventType> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<EventType>() {
public EventType findValueByNumber(int number) {
return EventType.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.dataplex.v1.BusinessGlossaryEvent.getDescriptor()
.getEnumTypes()
.get(0);
}
private static final EventType[] VALUES = values();
public static EventType valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private EventType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType)
}
  public static final int MESSAGE_FIELD_NUMBER = 1;
  // Stored as Object: holds either a String or a ByteString, converted lazily
  // on first access in either direction.
  @SuppressWarnings("serial")
  private volatile java.lang.Object message_ = "";
  /**
   *
   *
   * <pre>
   * The log message.
   * </pre>
   *
   * <code>string message = 1;</code>
   *
   * @return The message.
   */
  @java.lang.Override
  public java.lang.String getMessage() {
    java.lang.Object ref = message_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access of a ByteString payload: decode once, cache result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      message_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The log message.
   * </pre>
   *
   * <code>string message = 1;</code>
   *
   * @return The bytes for message.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getMessageBytes() {
    java.lang.Object ref = message_;
    if (ref instanceof java.lang.String) {
      // First bytes access of a String payload: encode once, cache result.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      message_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
public static final int EVENT_TYPE_FIELD_NUMBER = 2;

// Stored as the raw wire integer so unknown enum values survive a parse/serialize round trip.
private int eventType_ = 0;

/**
 *
 *
 * <pre>
 * The type of the event.
 * </pre>
 *
 * <code>.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType event_type = 2;</code>
 *
 * @return The enum numeric value on the wire for eventType.
 */
@java.lang.Override
public int getEventTypeValue() {
  return eventType_;
}

/**
 *
 *
 * <pre>
 * The type of the event.
 * </pre>
 *
 * <code>.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType event_type = 2;</code>
 *
 * @return The eventType, or {@code UNRECOGNIZED} if the stored wire value is unknown
 *     to this generated code version.
 */
@java.lang.Override
public com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType getEventType() {
  com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType result =
      com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType.forNumber(eventType_);
  return result == null
      ? com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType.UNRECOGNIZED
      : result;
}
public static final int RESOURCE_FIELD_NUMBER = 3;

// Holds either a String (already decoded) or a ByteString (raw wire form); decoded lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";

/**
 *
 *
 * <pre>
 * Name of the resource.
 * </pre>
 *
 * <code>string resource = 3;</code>
 *
 * @return The resource.
 */
@java.lang.Override
public java.lang.String getResource() {
  java.lang.Object ref = resource_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field is still in wire form: decode once and cache the String for later calls.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    resource_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Name of the resource.
 * </pre>
 *
 * <code>string resource = 3;</code>
 *
 * @return The bytes for resource.
 */
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
  java.lang.Object ref = resource_;
  if (ref instanceof java.lang.String) {
    // Field was decoded earlier (or set as a String): re-encode and cache the bytes.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    resource_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

/**
 * Always initialized: this message declares no required fields, so the first call
 * caches {@code 1} and subsequent calls return immediately.
 */
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message in field-number order, following proto3 semantics:
 * each field is written only when it differs from its default (empty string / enum 0).
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
  }
  if (eventType_
      != com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType.EVENT_TYPE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(2, eventType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, resource_);
  }
  // Preserve any fields parsed from a newer schema version.
  getUnknownFields().writeTo(output);
}
/**
 * Computes the exact byte size that {@code writeTo} will produce, using the same
 * default-skipping rules, and memoizes the result in {@code memoizedSize}.
 */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
  }
  if (eventType_
      != com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType.EVENT_TYPE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, eventType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, resource_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Field-wise equality: message, raw event_type wire value, resource, and unknown fields
 * must all match. Non-BusinessGlossaryEvent objects fall through to the superclass.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.dataplex.v1.BusinessGlossaryEvent)) {
    return super.equals(obj);
  }
  com.google.cloud.dataplex.v1.BusinessGlossaryEvent other =
      (com.google.cloud.dataplex.v1.BusinessGlossaryEvent) obj;
  if (!getMessage().equals(other.getMessage())) return false;
  // Compare the raw int so unknown enum values are still distinguished.
  if (eventType_ != other.eventType_) return false;
  if (!getResource().equals(other.getResource())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
/**
 * Hash over descriptor, each field number/value pair, and unknown fields;
 * memoized because the message is immutable. Consistent with {@link #equals}.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
  hash = (53 * hash) + getMessage().hashCode();
  hash = (37 * hash) + EVENT_TYPE_FIELD_NUMBER;
  hash = (53 * hash) + eventType_;
  hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
  hash = (53 * hash) + getResource().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. The byte/ByteString/ByteBuffer
// overloads delegate to PARSER directly; the stream overloads go through
// GeneratedMessageV3 helpers that translate wire errors into IOExceptions.
// ---------------------------------------------------------------------------

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
/** Creates a fresh builder for this message type. */
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Creates a builder initialized with default (empty) field values. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Creates a builder pre-populated with all fields of {@code prototype}. */
public static Builder newBuilder(com.google.cloud.dataplex.v1.BusinessGlossaryEvent prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

/**
 * Converts this message back into a builder. The default instance short-circuits to an
 * empty builder to avoid a needless merge.
 */
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

/** Runtime hook: creates a builder attached to a parent for change notification. */
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Payload associated with Business Glossary related log events.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dataplex.v1.BusinessGlossaryEvent}
 *
 * <p>Mutable builder for {@code BusinessGlossaryEvent}. Field presence is tracked in
 * {@code bitField0_}: bit 0 = message, bit 1 = event_type, bit 2 = resource.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.BusinessGlossaryEvent)
    com.google.cloud.dataplex.v1.BusinessGlossaryEventOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dataplex.v1.LogsProto
        .internal_static_google_cloud_dataplex_v1_BusinessGlossaryEvent_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dataplex.v1.LogsProto
        .internal_static_google_cloud_dataplex_v1_BusinessGlossaryEvent_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dataplex.v1.BusinessGlossaryEvent.class,
            com.google.cloud.dataplex.v1.BusinessGlossaryEvent.Builder.class);
  }

  // Construct using com.google.cloud.dataplex.v1.BusinessGlossaryEvent.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  /** Resets all fields to their proto3 defaults and clears the presence bits. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    message_ = "";
    eventType_ = 0;
    resource_ = "";
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.dataplex.v1.LogsProto
        .internal_static_google_cloud_dataplex_v1_BusinessGlossaryEvent_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.dataplex.v1.BusinessGlossaryEvent getDefaultInstanceForType() {
    return com.google.cloud.dataplex.v1.BusinessGlossaryEvent.getDefaultInstance();
  }

  /**
   * Builds the message, failing if it is not initialized.
   *
   * @throws com.google.protobuf.UninitializedMessageException via
   *     {@code newUninitializedMessageException} (cannot happen here: no required fields)
   */
  @java.lang.Override
  public com.google.cloud.dataplex.v1.BusinessGlossaryEvent build() {
    com.google.cloud.dataplex.v1.BusinessGlossaryEvent result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  /** Builds the message without the initialization check. */
  @java.lang.Override
  public com.google.cloud.dataplex.v1.BusinessGlossaryEvent buildPartial() {
    com.google.cloud.dataplex.v1.BusinessGlossaryEvent result =
        new com.google.cloud.dataplex.v1.BusinessGlossaryEvent(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose presence bit is set into the result message.
  private void buildPartial0(com.google.cloud.dataplex.v1.BusinessGlossaryEvent result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.message_ = message_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.eventType_ = eventType_;
    }
    if (((from_bitField0_ & 0x00000004) != 0)) {
      result.resource_ = resource_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  /** Dispatches to the typed merge when possible, else to the reflective superclass merge. */
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.dataplex.v1.BusinessGlossaryEvent) {
      return mergeFrom((com.google.cloud.dataplex.v1.BusinessGlossaryEvent) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Merges {@code other} into this builder; non-default fields of {@code other} win. */
  public Builder mergeFrom(com.google.cloud.dataplex.v1.BusinessGlossaryEvent other) {
    if (other == com.google.cloud.dataplex.v1.BusinessGlossaryEvent.getDefaultInstance())
      return this;
    if (!other.getMessage().isEmpty()) {
      message_ = other.message_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.eventType_ != 0) {
      setEventTypeValue(other.getEventTypeValue());
    }
    if (!other.getResource().isEmpty()) {
      resource_ = other.resource_;
      bitField0_ |= 0x00000004;
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  /**
   * Parses fields from the wire, dispatching on each tag
   * (field number &lt;&lt; 3 | wire type): 10 = message, 16 = event_type, 26 = resource.
   * Unknown tags are preserved via {@code parseUnknownField}.
   */
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              message_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 16:
            {
              eventType_ = input.readEnum();
              bitField0_ |= 0x00000002;
              break;
            } // case 16
          case 26:
            {
              resource_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Presence bits: bit 0 = message, bit 1 = event_type, bit 2 = resource.
  private int bitField0_;

  private java.lang.Object message_ = "";

  /**
   *
   *
   * <pre>
   * The log message.
   * </pre>
   *
   * <code>string message = 1;</code>
   *
   * @return The message.
   */
  public java.lang.String getMessage() {
    java.lang.Object ref = message_;
    if (!(ref instanceof java.lang.String)) {
      // Still in wire form; decode once and cache.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      message_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * The log message.
   * </pre>
   *
   * <code>string message = 1;</code>
   *
   * @return The bytes for message.
   */
  public com.google.protobuf.ByteString getMessageBytes() {
    java.lang.Object ref = message_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      message_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * The log message.
   * </pre>
   *
   * <code>string message = 1;</code>
   *
   * @param value The message to set.
   * @return This builder for chaining.
   * @throws NullPointerException if {@code value} is null
   */
  public Builder setMessage(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    message_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The log message.
   * </pre>
   *
   * <code>string message = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearMessage() {
    message_ = getDefaultInstance().getMessage();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The log message.
   * </pre>
   *
   * <code>string message = 1;</code>
   *
   * @param value The bytes for message to set; must be valid UTF-8.
   * @return This builder for chaining.
   * @throws NullPointerException if {@code value} is null
   */
  public Builder setMessageBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    message_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  // Raw wire value; unknown enum numbers are preserved as-is.
  private int eventType_ = 0;

  /**
   *
   *
   * <pre>
   * The type of the event.
   * </pre>
   *
   * <code>.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType event_type = 2;</code>
   *
   * @return The enum numeric value on the wire for eventType.
   */
  @java.lang.Override
  public int getEventTypeValue() {
    return eventType_;
  }

  /**
   *
   *
   * <pre>
   * The type of the event.
   * </pre>
   *
   * <code>.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType event_type = 2;</code>
   *
   * @param value The enum numeric value on the wire for eventType to set.
   * @return This builder for chaining.
   */
  public Builder setEventTypeValue(int value) {
    eventType_ = value;
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The type of the event.
   * </pre>
   *
   * <code>.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType event_type = 2;</code>
   *
   * @return The eventType, or {@code UNRECOGNIZED} for an unknown wire value.
   */
  @java.lang.Override
  public com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType getEventType() {
    com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType result =
        com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType.forNumber(eventType_);
    return result == null
        ? com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType.UNRECOGNIZED
        : result;
  }

  /**
   *
   *
   * <pre>
   * The type of the event.
   * </pre>
   *
   * <code>.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType event_type = 2;</code>
   *
   * @param value The eventType to set.
   * @return This builder for chaining.
   * @throws NullPointerException if {@code value} is null
   */
  public Builder setEventType(
      com.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000002;
    eventType_ = value.getNumber();
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * The type of the event.
   * </pre>
   *
   * <code>.google.cloud.dataplex.v1.BusinessGlossaryEvent.EventType event_type = 2;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearEventType() {
    bitField0_ = (bitField0_ & ~0x00000002);
    eventType_ = 0;
    onChanged();
    return this;
  }

  private java.lang.Object resource_ = "";

  /**
   *
   *
   * <pre>
   * Name of the resource.
   * </pre>
   *
   * <code>string resource = 3;</code>
   *
   * @return The resource.
   */
  public java.lang.String getResource() {
    java.lang.Object ref = resource_;
    if (!(ref instanceof java.lang.String)) {
      // Still in wire form; decode once and cache.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resource_ = s;
      return s;
    } else {
      return (java.lang.String) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Name of the resource.
   * </pre>
   *
   * <code>string resource = 3;</code>
   *
   * @return The bytes for resource.
   */
  public com.google.protobuf.ByteString getResourceBytes() {
    java.lang.Object ref = resource_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      resource_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  /**
   *
   *
   * <pre>
   * Name of the resource.
   * </pre>
   *
   * <code>string resource = 3;</code>
   *
   * @param value The resource to set.
   * @return This builder for chaining.
   * @throws NullPointerException if {@code value} is null
   */
  public Builder setResource(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    resource_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Name of the resource.
   * </pre>
   *
   * <code>string resource = 3;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearResource() {
    resource_ = getDefaultInstance().getResource();
    bitField0_ = (bitField0_ & ~0x00000004);
    onChanged();
    return this;
  }

  /**
   *
   *
   * <pre>
   * Name of the resource.
   * </pre>
   *
   * <code>string resource = 3;</code>
   *
   * @param value The bytes for resource to set; must be valid UTF-8.
   * @return This builder for chaining.
   * @throws NullPointerException if {@code value} is null
   */
  public Builder setResourceBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    resource_ = value;
    bitField0_ |= 0x00000004;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.BusinessGlossaryEvent)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.BusinessGlossaryEvent)

// Singleton with all fields at their proto3 defaults; shared by every caller.
private static final com.google.cloud.dataplex.v1.BusinessGlossaryEvent DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.BusinessGlossaryEvent();
}

public static com.google.cloud.dataplex.v1.BusinessGlossaryEvent getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser used by all parseFrom overloads: parses via a fresh Builder and returns the
// partial message (attached to the exception when parsing fails mid-stream).
private static final com.google.protobuf.Parser<BusinessGlossaryEvent> PARSER =
    new com.google.protobuf.AbstractParser<BusinessGlossaryEvent>() {
      @java.lang.Override
      public BusinessGlossaryEvent parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<BusinessGlossaryEvent> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<BusinessGlossaryEvent> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dataplex.v1.BusinessGlossaryEvent getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,838 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrajectoryPrecisionInstance.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Spec for TrajectoryPrecision instance.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance}
*/
public final class TrajectoryPrecisionInstance extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance)
TrajectoryPrecisionInstanceOrBuilder {
private static final long serialVersionUID = 0L;

// Use TrajectoryPrecisionInstance.newBuilder() to construct.
private TrajectoryPrecisionInstance(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

private TrajectoryPrecisionInstance() {}

/** Runtime hook for creating instances without invoking a public constructor. */
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new TrajectoryPrecisionInstance();
}
/** Returns the reflective descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryPrecisionInstance_descriptor;
}

/** Wires the generated accessor table to the message and builder classes for reflection. */
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
      .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryPrecisionInstance_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance.class,
          com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance.Builder.class);
}
// Presence bits: bit 0 = predicted_trajectory, bit 1 = reference_trajectory.
private int bitField0_;

public static final int PREDICTED_TRAJECTORY_FIELD_NUMBER = 1;

// Null until set; accessors substitute the Trajectory default instance.
private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_;

/**
 *
 *
 * <pre>
 * Required. Spec for predicted tool call trajectory.
 * </pre>
 *
 * <code>
 * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the predictedTrajectory field is set.
 */
@java.lang.Override
public boolean hasPredictedTrajectory() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. Spec for predicted tool call trajectory.
 * </pre>
 *
 * <code>
 * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The predictedTrajectory, or the Trajectory default instance if unset.
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() {
  return predictedTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : predictedTrajectory_;
}

/**
 *
 *
 * <pre>
 * Required. Spec for predicted tool call trajectory.
 * </pre>
 *
 * <code>
 * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getPredictedTrajectoryOrBuilder() {
  return predictedTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : predictedTrajectory_;
}
public static final int REFERENCE_TRAJECTORY_FIELD_NUMBER = 2;

// Null until set; accessors substitute the Trajectory default instance.
private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_;

/**
 *
 *
 * <pre>
 * Required. Spec for reference tool call trajectory.
 * </pre>
 *
 * <code>
 * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the referenceTrajectory field is set.
 */
@java.lang.Override
public boolean hasReferenceTrajectory() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 *
 *
 * <pre>
 * Required. Spec for reference tool call trajectory.
 * </pre>
 *
 * <code>
 * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The referenceTrajectory, or the Trajectory default instance if unset.
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() {
  return referenceTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : referenceTrajectory_;
}

/**
 *
 *
 * <pre>
 * Required. Spec for reference tool call trajectory.
 * </pre>
 *
 * <code>
 * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getReferenceTrajectoryOrBuilder() {
  return referenceTrajectory_ == null
      ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
      : referenceTrajectory_;
}
// Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

/**
 * Always initialized at the wire level: "required" here is an API annotation
 * ((google.api.field_behavior) = REQUIRED), not a proto2 required field, so the
 * first call caches {@code 1} and subsequent calls return immediately.
 */
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
/**
 * Serializes this message in field-number order; each sub-message is written only
 * when its presence bit is set.
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getPredictedTrajectory());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getReferenceTrajectory());
  }
  // Preserve any fields parsed from a newer schema version.
  getUnknownFields().writeTo(output);
}
/**
 * Computes the exact byte size that {@code writeTo} will produce, using the same
 * presence checks, and memoizes the result in {@code memoizedSize}.
 */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPredictedTrajectory());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReferenceTrajectory());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Field-wise equality: presence and value of both trajectory fields plus the
 * unknown-field set must match. Other types fall through to the superclass.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance other =
      (com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance) obj;
  if (hasPredictedTrajectory() != other.hasPredictedTrajectory()) return false;
  if (hasPredictedTrajectory()) {
    if (!getPredictedTrajectory().equals(other.getPredictedTrajectory())) return false;
  }
  if (hasReferenceTrajectory() != other.hasReferenceTrajectory()) return false;
  if (hasReferenceTrajectory()) {
    if (!getReferenceTrajectory().equals(other.getReferenceTrajectory())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
/**
 * Hash over descriptor, each present field's number/value pair, and unknown fields;
 * memoized because the message is immutable. Consistent with {@link #equals}.
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasPredictedTrajectory()) {
    hash = (37 * hash) + PREDICTED_TRAJECTORY_FIELD_NUMBER;
    hash = (53 * hash) + getPredictedTrajectory().hashCode();
  }
  if (hasReferenceTrajectory()) {
    hash = (37 * hash) + REFERENCE_TRAJECTORY_FIELD_NUMBER;
    hash = (53 * hash) + getReferenceTrajectory().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. The byte/ByteString/ByteBuffer
// overloads delegate to PARSER directly; the stream overloads go through
// GeneratedMessageV3 helpers that translate wire errors into IOExceptions.
// ---------------------------------------------------------------------------

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Builder factories: fresh builders are created from the singleton default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a needless mergeFrom when this IS the default instance (nothing to copy).
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Spec for TrajectoryPrecision instance.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance)
      com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstanceOrBuilder {
    // NOTE: generated by protoc — do not hand-edit; regenerate from the .proto file instead.
    // The builder accumulates field values plus presence bits (bitField0_), and copies
    // them into an immutable message in buildPartial().
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryPrecisionInstance_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryPrecisionInstance_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance.class,
              com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getPredictedTrajectoryFieldBuilder();
        getReferenceTrajectoryFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      predictedTrajectory_ = null;
      if (predictedTrajectoryBuilder_ != null) {
        predictedTrajectoryBuilder_.dispose();
        predictedTrajectoryBuilder_ = null;
      }
      referenceTrajectory_ = null;
      if (referenceTrajectoryBuilder_ != null) {
        referenceTrajectoryBuilder_.dispose();
        referenceTrajectoryBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_TrajectoryPrecisionInstance_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance build() {
      com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance buildPartial() {
      com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance result =
          new com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set into the result message.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.predictedTrajectory_ =
            predictedTrajectoryBuilder_ == null
                ? predictedTrajectory_
                : predictedTrajectoryBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.referenceTrajectory_ =
            referenceTrajectoryBuilder_ == null
                ? referenceTrajectory_
                : referenceTrajectoryBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance) {
        return mergeFrom((com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance other) {
      if (other
          == com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance.getDefaultInstance())
        return this;
      if (other.hasPredictedTrajectory()) {
        mergePredictedTrajectory(other.getPredictedTrajectory());
      }
      if (other.hasReferenceTrajectory()) {
        mergeReferenceTrajectory(other.getReferenceTrajectory());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(
                    getPredictedTrajectoryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(
                    getReferenceTrajectoryFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: bit 0 = predictedTrajectory, bit 1 = referenceTrajectory.
    private int bitField0_;

    private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.Trajectory,
            com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
            com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
        predictedTrajectoryBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the predictedTrajectory field is set.
     */
    public boolean hasPredictedTrajectory() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The predictedTrajectory.
     */
    public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() {
      if (predictedTrajectoryBuilder_ == null) {
        return predictedTrajectory_ == null
            ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
            : predictedTrajectory_;
      } else {
        return predictedTrajectoryBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setPredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
      if (predictedTrajectoryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        predictedTrajectory_ = value;
      } else {
        predictedTrajectoryBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setPredictedTrajectory(
        com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) {
      if (predictedTrajectoryBuilder_ == null) {
        predictedTrajectory_ = builderForValue.build();
      } else {
        predictedTrajectoryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergePredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
      if (predictedTrajectoryBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && predictedTrajectory_ != null
            && predictedTrajectory_
                != com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) {
          getPredictedTrajectoryBuilder().mergeFrom(value);
        } else {
          predictedTrajectory_ = value;
        }
      } else {
        predictedTrajectoryBuilder_.mergeFrom(value);
      }
      if (predictedTrajectory_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearPredictedTrajectory() {
      bitField0_ = (bitField0_ & ~0x00000001);
      predictedTrajectory_ = null;
      if (predictedTrajectoryBuilder_ != null) {
        predictedTrajectoryBuilder_.dispose();
        predictedTrajectoryBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getPredictedTrajectoryBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getPredictedTrajectoryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder
        getPredictedTrajectoryOrBuilder() {
      if (predictedTrajectoryBuilder_ != null) {
        return predictedTrajectoryBuilder_.getMessageOrBuilder();
      } else {
        return predictedTrajectory_ == null
            ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
            : predictedTrajectory_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for predicted tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.Trajectory,
            com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
            com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
        getPredictedTrajectoryFieldBuilder() {
      if (predictedTrajectoryBuilder_ == null) {
        predictedTrajectoryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.Trajectory,
                com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
                com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>(
                getPredictedTrajectory(), getParentForChildren(), isClean());
        predictedTrajectory_ = null;
      }
      return predictedTrajectoryBuilder_;
    }

    private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.Trajectory,
            com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
            com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
        referenceTrajectoryBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the referenceTrajectory field is set.
     */
    public boolean hasReferenceTrajectory() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The referenceTrajectory.
     */
    public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() {
      if (referenceTrajectoryBuilder_ == null) {
        return referenceTrajectory_ == null
            ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
            : referenceTrajectory_;
      } else {
        return referenceTrajectoryBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
      if (referenceTrajectoryBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        referenceTrajectory_ = value;
      } else {
        referenceTrajectoryBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setReferenceTrajectory(
        com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) {
      if (referenceTrajectoryBuilder_ == null) {
        referenceTrajectory_ = builderForValue.build();
      } else {
        referenceTrajectoryBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
      if (referenceTrajectoryBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && referenceTrajectory_ != null
            && referenceTrajectory_
                != com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) {
          getReferenceTrajectoryBuilder().mergeFrom(value);
        } else {
          referenceTrajectory_ = value;
        }
      } else {
        referenceTrajectoryBuilder_.mergeFrom(value);
      }
      if (referenceTrajectory_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearReferenceTrajectory() {
      bitField0_ = (bitField0_ & ~0x00000002);
      referenceTrajectory_ = null;
      if (referenceTrajectoryBuilder_ != null) {
        referenceTrajectoryBuilder_.dispose();
        referenceTrajectoryBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getReferenceTrajectoryBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getReferenceTrajectoryFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder
        getReferenceTrajectoryOrBuilder() {
      if (referenceTrajectoryBuilder_ != null) {
        return referenceTrajectoryBuilder_.getMessageOrBuilder();
      } else {
        return referenceTrajectory_ == null
            ? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
            : referenceTrajectory_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Spec for reference tool call trajectory.
     * </pre>
     *
     * <code>
     * optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1beta1.Trajectory,
            com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
            com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
        getReferenceTrajectoryFieldBuilder() {
      if (referenceTrajectoryBuilder_ == null) {
        referenceTrajectoryBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1beta1.Trajectory,
                com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
                com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>(
                getReferenceTrajectory(), getParentForChildren(), isClean());
        referenceTrajectory_ = null;
      }
      return referenceTrajectoryBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance)
  }
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance)
  // Singleton default instance, created eagerly in the static initializer.
  private static final com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance();
  }

  public static com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser behind all parseFrom overloads. On failure, the partially parsed
  // message is attached via setUnfinishedMessage so callers can inspect what was read.
  private static final com.google.protobuf.Parser<TrajectoryPrecisionInstance> PARSER =
      new com.google.protobuf.AbstractParser<TrajectoryPrecisionInstance>() {
        @java.lang.Override
        public TrajectoryPrecisionInstance parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<TrajectoryPrecisionInstance> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TrajectoryPrecisionInstance> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1beta1.TrajectoryPrecisionInstance
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/cxf | 37,102 | services/sts/sts-core/src/main/java/org/apache/cxf/sts/request/RequestParser.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.sts.request;
import java.io.ByteArrayInputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.KeyException;
import java.security.NoSuchProviderException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.crypto.MarshalException;
import javax.xml.crypto.dom.DOMStructure;
import javax.xml.crypto.dsig.keyinfo.KeyInfo;
import javax.xml.crypto.dsig.keyinfo.KeyInfoFactory;
import javax.xml.crypto.dsig.keyinfo.KeyValue;
import javax.xml.crypto.dsig.keyinfo.X509Data;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import jakarta.xml.bind.JAXBElement;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.common.util.Base64Utility;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.helpers.DOMUtils;
import org.apache.cxf.rt.security.claims.Claim;
import org.apache.cxf.rt.security.claims.ClaimCollection;
import org.apache.cxf.sts.QNameConstants;
import org.apache.cxf.sts.STSConstants;
import org.apache.cxf.sts.STSPropertiesMBean;
import org.apache.cxf.sts.claims.ClaimsParser;
import org.apache.cxf.sts.claims.IdentityClaimsParser;
import org.apache.cxf.ws.security.sts.provider.STSException;
import org.apache.cxf.ws.security.sts.provider.model.BinarySecretType;
import org.apache.cxf.ws.security.sts.provider.model.CancelTargetType;
import org.apache.cxf.ws.security.sts.provider.model.ClaimsType;
import org.apache.cxf.ws.security.sts.provider.model.EntropyType;
import org.apache.cxf.ws.security.sts.provider.model.LifetimeType;
import org.apache.cxf.ws.security.sts.provider.model.OnBehalfOfType;
import org.apache.cxf.ws.security.sts.provider.model.ParticipantType;
import org.apache.cxf.ws.security.sts.provider.model.ParticipantsType;
import org.apache.cxf.ws.security.sts.provider.model.RenewTargetType;
import org.apache.cxf.ws.security.sts.provider.model.RenewingType;
import org.apache.cxf.ws.security.sts.provider.model.RequestSecurityTokenType;
import org.apache.cxf.ws.security.sts.provider.model.UseKeyType;
import org.apache.cxf.ws.security.sts.provider.model.ValidateTargetType;
import org.apache.cxf.ws.security.sts.provider.model.secext.ReferenceType;
import org.apache.cxf.ws.security.sts.provider.model.secext.SecurityTokenReferenceType;
import org.apache.cxf.ws.security.sts.provider.model.wstrust14.ActAsType;
import org.apache.cxf.ws.security.sts.provider.model.xmldsig.KeyInfoType;
import org.apache.cxf.ws.security.sts.provider.model.xmldsig.X509DataType;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.common.util.XMLUtils;
import org.apache.wss4j.dom.WSConstants;
import org.apache.wss4j.dom.WSDocInfo;
import org.apache.wss4j.dom.engine.WSSConfig;
import org.apache.wss4j.dom.engine.WSSecurityEngineResult;
import org.apache.wss4j.dom.handler.RequestData;
import org.apache.wss4j.dom.handler.WSHandlerConstants;
import org.apache.wss4j.dom.handler.WSHandlerResult;
import org.apache.wss4j.dom.message.token.SecurityContextToken;
import org.apache.wss4j.dom.processor.EncryptedKeyProcessor;
import org.apache.xml.security.utils.Constants;
/**
* This class parses a RequestSecurityToken object. It stores the values that it finds into a KeyRequirements
* and TokenRequirements objects.
*/
public class RequestParser {
    private static final Logger LOG = LogUtils.getL7dLogger(RequestParser.class);
    // When true, unrecognised child elements of the RST are stored as "custom content"
    // on the TokenRequirements; when false they trigger a BAD_REQUEST STSException.
    private boolean allowCustomContent;
    /**
     * Parse a RequestSecurityToken into its constituent parts: key/encryption related
     * parameters are collected into a KeyRequirements object and token-related parameters
     * into a TokenRequirements object, both returned wrapped in a RequestRequirements.
     *
     * @param request the incoming RequestSecurityToken payload
     * @param messageContext the current message context (passed down so that referenced
     *        target tokens can be resolved)
     * @param stsProperties the STS configuration bean (passed down to key parsing)
     * @param claimsParsers the configured parsers for requested Claims
     * @return the parsed RequestRequirements
     * @throws STSException (BAD_REQUEST) if an element is not recognised and
     *         allowCustomContent is false
     */
    public RequestRequirements parseRequest(
        RequestSecurityTokenType request, Map<String, Object> messageContext, STSPropertiesMBean stsProperties,
        List<ClaimsParser> claimsParsers
    ) throws STSException {
        LOG.fine("Parsing RequestSecurityToken");

        KeyRequirements keyRequirements = new KeyRequirements();
        TokenRequirements tokenRequirements = new TokenRequirements();

        for (Object requestObject : request.getAny()) {
            // JAXB types
            if (requestObject instanceof JAXBElement<?>) {
                JAXBElement<?> jaxbElement = (JAXBElement<?>) requestObject;
                if (LOG.isLoggable(Level.FINE)) {
                    LOG.fine("Found " + jaxbElement.getName() + ": " + jaxbElement.getValue());
                }
                try {
                    // Try the token parser first, then the key parser; anything left over
                    // is either stored as custom content or rejected.
                    boolean found =
                        parseTokenRequirements(jaxbElement, tokenRequirements, messageContext, claimsParsers);
                    if (!found) {
                        found = parseKeyRequirements(jaxbElement, keyRequirements, messageContext, stsProperties);
                    }
                    if (!found) {
                        if (allowCustomContent) {
                            tokenRequirements.addCustomContent(jaxbElement);
                        } else {
                            LOG.log(
                                Level.WARNING,
                                "Found a JAXB object of unknown type: " + jaxbElement.getName()
                            );
                            throw new STSException(
                                "An unknown element was received", STSException.BAD_REQUEST
                            );
                        }
                    }
                } catch (RuntimeException ex) {
                    // Log before rethrowing so parsing failures are visible in the STS log
                    LOG.log(Level.WARNING, "", ex);
                    throw ex;
                }
                // SecondaryParameters/AppliesTo
            } else if (requestObject instanceof Element) {
                Element element = (Element)requestObject;
                if (STSConstants.WST_NS_05_12.equals(element.getNamespaceURI())
                    && "SecondaryParameters".equals(element.getLocalName())) {
                    parseSecondaryParameters(element, claimsParsers, tokenRequirements, keyRequirements);
                } else if ("AppliesTo".equals(element.getLocalName())
                    && (STSConstants.WSP_NS.equals(element.getNamespaceURI())
                        || STSConstants.WSP_NS_04.equals(element.getNamespaceURI())
                        || STSConstants.WSP_NS_06.equals(element.getNamespaceURI()))) {
                    // AppliesTo is accepted under any of the supported WS-Policy namespaces
                    tokenRequirements.setAppliesTo(element);
                    LOG.fine("Found AppliesTo element");
                } else if (allowCustomContent) {
                    tokenRequirements.addCustomContent(requestObject);
                } else {
                    LOG.log(
                        Level.WARNING,
                        "An unknown (DOM) element was received: " + element.getLocalName()
                        + " " + element.getNamespaceURI()
                    );
                    throw new STSException(
                        "An unknown element was received", STSException.BAD_REQUEST
                    );
                }
            } else {
                LOG.log(Level.WARNING, "An unknown element was received");
                throw new STSException(
                    "An unknown element was received", STSException.BAD_REQUEST
                );
            }
        }
        String context = request.getContext();
        tokenRequirements.setContext(context);
        if (LOG.isLoggable(Level.FINE)) {
            LOG.fine("Received Context attribute: " + context);
        }

        RequestRequirements requestRequirements = new RequestRequirements();
        requestRequirements.setKeyRequirements(keyRequirements);
        requestRequirements.setTokenRequirements(tokenRequirements);

        return requestRequirements;
    }
/**
* Parse the Key and Encryption requirements into the KeyRequirements argument.
*/
private static boolean parseKeyRequirements(
JAXBElement<?> jaxbElement, KeyRequirements keyRequirements,
Map<String, Object> messageContext, STSPropertiesMBean stsProperties
) {
if (QNameConstants.AUTHENTICATION_TYPE.equals(jaxbElement.getName())) {
String authenticationType = (String)jaxbElement.getValue();
keyRequirements.setAuthenticationType(authenticationType);
} else if (QNameConstants.KEY_TYPE.equals(jaxbElement.getName())) {
String keyType = (String)jaxbElement.getValue();
keyRequirements.setKeyType(keyType);
} else if (QNameConstants.KEY_SIZE.equals(jaxbElement.getName())) {
long keySize = ((Long)jaxbElement.getValue()).longValue();
keyRequirements.setKeySize(keySize);
} else if (QNameConstants.SIGNATURE_ALGORITHM.equals(jaxbElement.getName())) {
String signatureAlgorithm = (String)jaxbElement.getValue();
keyRequirements.setSignatureAlgorithm(signatureAlgorithm);
} else if (QNameConstants.ENCRYPTION_ALGORITHM.equals(jaxbElement.getName())) {
String encryptionAlgorithm = (String)jaxbElement.getValue();
keyRequirements.setEncryptionAlgorithm(encryptionAlgorithm);
} else if (QNameConstants.C14N_ALGORITHM.equals(jaxbElement.getName())) {
String c14nAlgorithm = (String)jaxbElement.getValue();
keyRequirements.setC14nAlgorithm(c14nAlgorithm);
} else if (QNameConstants.COMPUTED_KEY_ALGORITHM.equals(jaxbElement.getName())) {
String computedKeyAlgorithm = (String)jaxbElement.getValue();
keyRequirements.setComputedKeyAlgorithm(computedKeyAlgorithm);
} else if (QNameConstants.KEYWRAP_ALGORITHM.equals(jaxbElement.getName())) {
String keywrapAlgorithm = (String)jaxbElement.getValue();
keyRequirements.setKeywrapAlgorithm(keywrapAlgorithm);
} else if (QNameConstants.USE_KEY.equals(jaxbElement.getName())) {
UseKeyType useKey = (UseKeyType)jaxbElement.getValue();
ReceivedCredential receivedCredential = parseUseKey(useKey, messageContext);
keyRequirements.setReceivedCredential(receivedCredential);
} else if (QNameConstants.ENTROPY.equals(jaxbElement.getName())) {
EntropyType entropyType = (EntropyType)jaxbElement.getValue();
Entropy entropy = parseEntropy(entropyType, stsProperties);
keyRequirements.setEntropy(entropy);
} else if (QNameConstants.SIGN_WITH.equals(jaxbElement.getName())) {
String signWith = (String)jaxbElement.getValue();
keyRequirements.setSignWith(signWith);
} else if (QNameConstants.ENCRYPT_WITH.equals(jaxbElement.getName())) {
String encryptWith = (String)jaxbElement.getValue();
keyRequirements.setEncryptWith(encryptWith);
} else if (QNameConstants.REQUEST_TYPE.equals(jaxbElement.getName())) {
// Skip the request type.
} else {
return false;
}
return true;
}
/**
* Parse the Token requirements into the TokenRequirements argument.
*/
private static boolean parseTokenRequirements(
JAXBElement<?> jaxbElement,
TokenRequirements tokenRequirements,
Map<String, Object> messageContext,
List<ClaimsParser> claimsParsers
) {
if (QNameConstants.TOKEN_TYPE.equals(jaxbElement.getName())) {
String tokenType = (String)jaxbElement.getValue();
tokenRequirements.setTokenType(tokenType);
} else if (QNameConstants.ON_BEHALF_OF.equals(jaxbElement.getName())) {
OnBehalfOfType onBehalfOfType = (OnBehalfOfType)jaxbElement.getValue();
ReceivedToken onBehalfOf = new ReceivedToken(onBehalfOfType.getAny());
tokenRequirements.setOnBehalfOf(onBehalfOf);
} else if (QNameConstants.ACT_AS.equals(jaxbElement.getName())) {
ActAsType actAsType = (ActAsType)jaxbElement.getValue();
ReceivedToken actAs = new ReceivedToken(actAsType.getAny());
tokenRequirements.setActAs(actAs);
} else if (QNameConstants.LIFETIME.equals(jaxbElement.getName())) {
LifetimeType lifetimeType = (LifetimeType)jaxbElement.getValue();
Lifetime lifetime = new Lifetime();
if (lifetimeType.getCreated() != null) {
lifetime.setCreated(lifetimeType.getCreated().getValue());
}
if (lifetimeType.getExpires() != null) {
lifetime.setExpires(lifetimeType.getExpires().getValue());
}
tokenRequirements.setLifetime(lifetime);
} else if (QNameConstants.VALIDATE_TARGET.equals(jaxbElement.getName())) {
ValidateTargetType validateTargetType = (ValidateTargetType)jaxbElement.getValue();
ReceivedToken validateTarget = new ReceivedToken(validateTargetType.getAny());
if (isTokenReferenced(validateTarget.getToken())) {
Element target = fetchTokenElementFromReference(validateTarget.getToken(), messageContext);
validateTarget = new ReceivedToken(target);
}
tokenRequirements.setValidateTarget(validateTarget);
} else if (QNameConstants.CANCEL_TARGET.equals(jaxbElement.getName())) {
CancelTargetType cancelTargetType = (CancelTargetType)jaxbElement.getValue();
ReceivedToken cancelTarget = new ReceivedToken(cancelTargetType.getAny());
if (isTokenReferenced(cancelTarget.getToken())) {
Element target = fetchTokenElementFromReference(cancelTarget.getToken(), messageContext);
cancelTarget = new ReceivedToken(target);
}
tokenRequirements.setCancelTarget(cancelTarget);
} else if (QNameConstants.RENEW_TARGET.equals(jaxbElement.getName())) {
RenewTargetType renewTargetType = (RenewTargetType)jaxbElement.getValue();
ReceivedToken renewTarget = new ReceivedToken(renewTargetType.getAny());
if (isTokenReferenced(renewTarget.getToken())) {
Element target = fetchTokenElementFromReference(renewTarget.getToken(), messageContext);
renewTarget = new ReceivedToken(target);
}
tokenRequirements.setRenewTarget(renewTarget);
} else if (QNameConstants.CLAIMS.equals(jaxbElement.getName())) {
ClaimsType claimsType = (ClaimsType)jaxbElement.getValue();
ClaimCollection requestedClaims = parseClaims(claimsType, claimsParsers);
tokenRequirements.setPrimaryClaims(requestedClaims);
} else if (QNameConstants.RENEWING.equals(jaxbElement.getName())) {
RenewingType renewingType = (RenewingType)jaxbElement.getValue();
Renewing renewing = new Renewing();
if (renewingType.isAllow() != null) {
renewing.setAllowRenewing(renewingType.isAllow());
}
if (renewingType.isOK() != null) {
renewing.setAllowRenewingAfterExpiry(renewingType.isOK());
}
tokenRequirements.setRenewing(renewing);
} else if (QNameConstants.PARTICIPANTS.equals(jaxbElement.getName())) {
ParticipantsType participantsType = (ParticipantsType)jaxbElement.getValue();
Participants participants = parseParticipants(participantsType);
tokenRequirements.setParticipants(participants);
} else {
return false;
}
return true;
}
    /**
     * Parse the UseKey structure to get a ReceivedCredential containing a cert/public-key/secret-key.
     * The UseKey content can be a JAXB KeyInfoType or SecurityTokenReferenceType, or a raw DOM
     * Element (a token reference, a KeyInfo Element, or an Element wrapping an X509Certificate).
     * @param useKey The UseKey object
     * @param messageContext The message context object
     * @return the ReceivedKey that has been parsed
     * @throws STSException
     */
    private static ReceivedCredential parseUseKey(
        UseKeyType useKey,
        Map<String, Object> messageContext
    ) throws STSException {
        // Raw (DER) bytes of an X.509 certificate, if one is found below
        byte[] x509 = null;
        if (useKey.getAny() instanceof JAXBElement<?>) {
            JAXBElement<?> useKeyJaxb = (JAXBElement<?>)useKey.getAny();
            Object obj = useKeyJaxb.getValue();
            if (KeyInfoType.class == useKeyJaxb.getDeclaredType() || obj instanceof KeyInfoType) {
                // KeyInfo content: look for an X509Data entry containing certificate bytes
                KeyInfoType keyInfoType = KeyInfoType.class.cast(useKeyJaxb.getValue());
                LOG.fine("Found KeyInfo UseKey type");
                for (Object keyInfoContent : keyInfoType.getContent()) {
                    X509DataType x509DataType = extractType(keyInfoContent, X509DataType.class);
                    if (null != x509DataType) {
                        LOG.fine("Found X509Data KeyInfo type");
                        for (Object x509Object
                            : x509DataType.getX509IssuerSerialOrX509SKIOrX509SubjectName()) {
                            x509 = extractType(x509Object, byte[].class);
                            if (null != x509) {
                                // First certificate found wins
                                LOG.fine("Found X509Certificate UseKey type");
                                break;
                            }
                        }
                    }
                }
            } else if (SecurityTokenReferenceType.class == useKeyJaxb.getDeclaredType()
                || obj instanceof SecurityTokenReferenceType) {
                // JAXB SecurityTokenReference: resolve it to the referenced token and
                // treat the token's text content as a Base64-encoded certificate
                SecurityTokenReferenceType strType =
                    SecurityTokenReferenceType.class.cast(useKeyJaxb.getValue());
                Element token = fetchTokenElementFromReference(strType, messageContext);
                try {
                    x509 = Base64Utility.decode(token.getTextContent().trim());
                    LOG.fine("Found X509Certificate UseKey type via reference");
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "", e);
                    throw new STSException(e.getMessage(), e, STSException.INVALID_REQUEST);
                }
            }
        } else if (useKey.getAny() instanceof Element) {
            if (isTokenReferenced(useKey.getAny())) {
                // DOM SecurityTokenReference: same resolution as the JAXB case above
                Element token = fetchTokenElementFromReference(useKey.getAny(), messageContext);
                try {
                    x509 = Base64Utility.decode(token.getTextContent().trim());
                    LOG.fine("Found X509Certificate UseKey type via reference");
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "", e);
                    throw new STSException(e.getMessage(), e, STSException.INVALID_REQUEST);
                }
            } else {
                Element element = (Element)useKey.getAny();
                if ("KeyInfo".equals(element.getLocalName())) {
                    // Delegate full KeyInfo parsing (public keys, X509Data, ...)
                    return parseKeyInfoElement((Element)useKey.getAny());
                }
                // Otherwise look for an embedded ds:X509Certificate child element
                NodeList x509CertData =
                    element.getElementsByTagNameNS(
                        Constants.SignatureSpecNS, Constants._TAG_X509CERTIFICATE
                    );
                if (x509CertData != null && x509CertData.getLength() > 0) {
                    try {
                        x509 = Base64Utility.decode(x509CertData.item(0).getTextContent().trim());
                        LOG.fine("Found X509Certificate UseKey type");
                    } catch (Exception e) {
                        LOG.log(Level.WARNING, "", e);
                        throw new STSException(e.getMessage(), e, STSException.INVALID_REQUEST);
                    }
                }
            }
        } else {
            LOG.log(Level.WARNING, "An unknown element was received");
            throw new STSException(
                "An unknown element was received", STSException.BAD_REQUEST
            );
        }
        if (x509 != null) {
            // Turn the raw bytes into an X509Certificate credential
            try {
                CertificateFactory cf = CertificateFactory.getInstance("X.509");
                X509Certificate cert =
                    (X509Certificate)cf.generateCertificate(new ByteArrayInputStream(x509));
                LOG.fine("Successfully parsed X509 Certificate from UseKey");
                ReceivedCredential receivedCredential = new ReceivedCredential();
                receivedCredential.setX509Cert(cert);
                return receivedCredential;
            } catch (CertificateException ex) {
                LOG.log(Level.WARNING, "", ex);
                throw new STSException("Error in parsing certificate: ", ex, STSException.INVALID_REQUEST);
            }
        }
        // No credential material found
        return null;
    }
private static Participants parseParticipants(ParticipantsType participantsType) {
Participants participants = new Participants();
if (participantsType.getPrimary() != null) {
participants.setPrimaryParticipant(participantsType.getPrimary().getAny());
}
if (participantsType.getParticipant() != null
&& !participantsType.getParticipant().isEmpty()) {
List<Object> secondaryParticipants =
new ArrayList<>(participantsType.getParticipant().size());
for (ParticipantType secondaryParticipant : participantsType.getParticipant()) {
secondaryParticipants.add(secondaryParticipant.getAny());
}
participants.setParticipants(secondaryParticipants);
}
return participants;
}
private static <T> T extractType(Object param, Class<T> clazz) {
if (param instanceof JAXBElement<?>) {
JAXBElement<?> jaxbElement = (JAXBElement<?>) param;
if (clazz == jaxbElement.getDeclaredType()) {
return clazz.cast(jaxbElement.getValue());
}
}
return null;
}
    /**
     * Parse the KeyInfo Element to return a ReceivedCredential object containing the found certificate or
     * public key.
     *
     * @param keyInfoElement the DOM KeyInfo Element to unmarshal
     * @return a ReceivedCredential holding the first KeyValue public key or X509Certificate
     *         found, or null if neither is present
     * @throws STSException if the KeyInfo Element cannot be unmarshalled
     */
    private static ReceivedCredential parseKeyInfoElement(Element keyInfoElement) throws STSException {
        KeyInfoFactory keyInfoFactory;
        try {
            // Prefer the Apache Santuario ("ApacheXMLDSig") provider when registered
            keyInfoFactory = KeyInfoFactory.getInstance("DOM", "ApacheXMLDSig");
        } catch (NoSuchProviderException ex) {
            keyInfoFactory = KeyInfoFactory.getInstance("DOM");
        }
        try {
            KeyInfo keyInfo = keyInfoFactory.unmarshalKeyInfo(new DOMStructure(keyInfoElement));
            List<?> list = keyInfo.getContent();
            for (int i = 0; i < list.size(); i++) {
                if (list.get(i) instanceof KeyValue) {
                    // A raw public key (e.g. RSA/DSA KeyValue)
                    KeyValue keyValue = (KeyValue)list.get(i);
                    ReceivedCredential receivedKey = new ReceivedCredential();
                    receivedKey.setPublicKey(keyValue.getPublicKey());
                    return receivedKey;
                } else if (list.get(i) instanceof X509Certificate) {
                    // A certificate appearing directly in the KeyInfo content
                    ReceivedCredential receivedKey = new ReceivedCredential();
                    receivedKey.setX509Cert((X509Certificate)list.get(i));
                    return receivedKey;
                } else if (list.get(i) instanceof X509Data) {
                    // A certificate wrapped in an X509Data structure
                    X509Data x509Data = (X509Data)list.get(i);
                    for (int j = 0; j < x509Data.getContent().size(); j++) {
                        if (x509Data.getContent().get(j) instanceof X509Certificate) {
                            ReceivedCredential receivedKey = new ReceivedCredential();
                            receivedKey.setX509Cert((X509Certificate)x509Data.getContent().get(j));
                            return receivedKey;
                        }
                    }
                }
            }
        } catch (MarshalException | KeyException e) {
            LOG.log(Level.WARNING, "", e);
            throw new STSException(e.getMessage(), e, STSException.INVALID_REQUEST);
        }
        // No usable key material found in the KeyInfo
        return null;
    }
    /**
     * Parse an Entropy object.
     *
     * @param entropyType an EntropyType object
     * @param stsProperties A STSPropertiesMBean object used to decrypt an EncryptedKey
     * @return the parsed Entropy, or null if no supported entropy content was found
     * @throws STSException if an EncryptedKey cannot be decrypted, or an unknown element is found
     */
    private static Entropy parseEntropy(
        EntropyType entropyType, STSPropertiesMBean stsProperties
    ) throws STSException {
        for (Object entropyObject : entropyType.getAny()) {
            if (entropyObject instanceof JAXBElement<?>) {
                // JAXB content: only a wst:BinarySecret is supported
                JAXBElement<?> entropyObjectJaxb = (JAXBElement<?>) entropyObject;
                if (QNameConstants.BINARY_SECRET.equals(entropyObjectJaxb.getName())) {
                    BinarySecretType binarySecretType =
                        (BinarySecretType)entropyObjectJaxb.getValue();
                    LOG.fine("Found BinarySecret Entropy type");
                    Entropy entropy = new Entropy();
                    BinarySecret binarySecret = new BinarySecret();
                    binarySecret.setBinarySecretType(binarySecretType.getType());
                    binarySecret.setBinarySecretValue(binarySecretType.getValue());
                    entropy.setBinarySecret(binarySecret);
                    return entropy;
                } else if (LOG.isLoggable(Level.FINE)) {
                    LOG.fine("Unsupported Entropy type: " + entropyObjectJaxb.getName());
                }
            } else if (entropyObject instanceof Element
                && "EncryptedKey".equals(((Element)entropyObject).getLocalName())) {
                // DOM EncryptedKey: decrypt it with the STS's signature crypto configuration
                EncryptedKeyProcessor processor = new EncryptedKeyProcessor();
                Element entropyElement = (Element)entropyObject;
                RequestData requestData = new RequestData();
                requestData.setDecCrypto(stsProperties.getSignatureCrypto());
                requestData.setCallbackHandler(stsProperties.getCallbackHandler());
                requestData.setWssConfig(WSSConfig.getNewInstance());
                requestData.setWsDocInfo(new WSDocInfo(entropyElement.getOwnerDocument()));
                try {
                    List<WSSecurityEngineResult> results =
                        processor.handleToken(entropyElement, requestData);
                    // The decrypted secret of the first result becomes the entropy key
                    Entropy entropy = new Entropy();
                    entropy.setDecryptedKey((byte[])results.get(0).get(WSSecurityEngineResult.TAG_SECRET));
                    return entropy;
                } catch (WSSecurityException e) {
                    LOG.log(Level.WARNING, "", e);
                    throw new STSException(e.getMessage(), e, STSException.INVALID_REQUEST);
                }
            } else {
                LOG.log(Level.WARNING, "An unknown element was received");
                throw new STSException(
                    "An unknown element was received", STSException.BAD_REQUEST
                );
            }
        }
        // Entropy element was empty
        return null;
    }
/**
* Parse the secondaryParameters element. Precedence goes to values that are specified as
* direct children of the RequestSecurityToken element.
* @param secondaryParameters the secondaryParameters element to parse
*/
private void parseSecondaryParameters(Element secondaryParameters, List<ClaimsParser> claimsParsers,
TokenRequirements tokenRequirements, KeyRequirements keyRequirements) {
LOG.fine("Found SecondaryParameters element");
Element child = DOMUtils.getFirstElement(secondaryParameters);
while (child != null) {
String localName = child.getLocalName();
String namespace = child.getNamespaceURI();
if (LOG.isLoggable(Level.FINE)) {
LOG.fine("Found " + localName + ": " + child.getTextContent().trim());
}
if (keyRequirements.getKeySize() == 0 && "KeySize".equals(localName)
&& STSConstants.WST_NS_05_12.equals(namespace)) {
long keySize = Integer.parseInt(child.getTextContent().trim());
keyRequirements.setKeySize(keySize);
} else if (tokenRequirements.getTokenType() == null
&& "TokenType".equals(localName) && STSConstants.WST_NS_05_12.equals(namespace)) {
String tokenType = child.getTextContent().trim();
tokenRequirements.setTokenType(tokenType);
} else if (keyRequirements.getKeyType() == null
&& "KeyType".equals(localName) && STSConstants.WST_NS_05_12.equals(namespace)) {
String keyType = child.getTextContent().trim();
keyRequirements.setKeyType(keyType);
} else if ("Claims".equals(localName) && STSConstants.WST_NS_05_12.equals(namespace)) {
ClaimCollection requestedClaims = parseClaims(child, claimsParsers);
tokenRequirements.setSecondaryClaims(requestedClaims);
} else {
LOG.fine("Found unknown element: " + localName + " " + namespace);
}
child = DOMUtils.getNextElement(child);
}
}
/**
* Create a ClaimCollection from a DOM Element
*/
private ClaimCollection parseClaims(Element claimsElement, List<ClaimsParser> claimsParsers) {
String dialectAttr = null;
ClaimCollection requestedClaims = new ClaimCollection();
try {
dialectAttr = claimsElement.getAttributeNS(null, "Dialect");
if (dialectAttr != null && !"".equals(dialectAttr)) {
requestedClaims.setDialect(new URI(dialectAttr));
}
} catch (URISyntaxException e1) {
LOG.log(
Level.WARNING,
"Cannot create URI from the given Dialect attribute value " + dialectAttr,
e1
);
}
Element childClaimType = DOMUtils.getFirstElement(claimsElement);
while (childClaimType != null) {
Claim requestClaim = parseChildClaimType(childClaimType, dialectAttr, claimsParsers);
if (requestClaim != null) {
requestedClaims.add(requestClaim);
}
childClaimType = DOMUtils.getNextElement(childClaimType);
}
return requestedClaims;
}
/**
* Create a ClaimCollection from a JAXB ClaimsType object
*/
private static ClaimCollection parseClaims(
ClaimsType claimsType, List<ClaimsParser> claimsParsers
) {
String dialectAttr = null;
ClaimCollection requestedClaims = new ClaimCollection();
try {
dialectAttr = claimsType.getDialect();
if (dialectAttr != null && !"".equals(dialectAttr)) {
requestedClaims.setDialect(new URI(dialectAttr));
}
} catch (URISyntaxException e1) {
LOG.log(
Level.WARNING,
"Cannot create URI from the given Dialect attribute value " + dialectAttr,
e1
);
}
for (Object claim : claimsType.getAny()) {
if (claim instanceof Element) {
Claim requestClaim = parseChildClaimType((Element)claim, dialectAttr, claimsParsers);
if (requestClaim != null) {
requestedClaims.add(requestClaim);
}
}
}
return requestedClaims;
}
/**
* Parse a child ClaimType into a Claim object.
*/
private static Claim parseChildClaimType(
Element childClaimType, String dialect, List<ClaimsParser> claimsParsers
) {
if (claimsParsers != null) {
for (ClaimsParser parser : claimsParsers) {
if (parser != null && dialect.equals(parser.getSupportedDialect())) {
return parser.parse(childClaimType);
}
}
}
if (IdentityClaimsParser.IDENTITY_CLAIMS_DIALECT.equals(dialect)) {
return IdentityClaimsParser.parseClaimType(childClaimType);
}
LOG.log(Level.WARNING, "No ClaimsParser is registered for dialect " + dialect);
throw new STSException(
"No ClaimsParser is registered for dialect " + dialect, STSException.BAD_REQUEST
);
}
/**
* Method to check if the passed token is a SecurityTokenReference
*/
private static boolean isTokenReferenced(Object targetToken) {
if (targetToken instanceof Element) {
Element tokenElement = (Element)targetToken;
String namespace = tokenElement.getNamespaceURI();
String localname = tokenElement.getLocalName();
if (STSConstants.WSSE_EXT_04_01.equals(namespace)
&& "SecurityTokenReference".equals(localname)) {
return true;
}
} else if (targetToken instanceof SecurityTokenReferenceType) {
return true;
}
return false;
}
/**
* Method to fetch token from the SecurityTokenReference
*/
private static Element fetchTokenElementFromReference(
Object targetToken, Map<String, Object> messageContext
) {
// Get the reference URI
String referenceURI = null;
if (targetToken instanceof Element) {
Element tokenElement = (Element) targetToken;
NodeList refList =
tokenElement.getElementsByTagNameNS(STSConstants.WSSE_EXT_04_01, "Reference");
if (refList.getLength() == 0) {
throw new STSException(
"Cannot find Reference element in the SecurityTokenReference.",
STSException.REQUEST_FAILED
);
}
referenceURI = refList.item(0).getNodeValue();
} else if (targetToken instanceof SecurityTokenReferenceType) {
Iterator<?> iterator = ((SecurityTokenReferenceType) targetToken).getAny().iterator();
while (iterator.hasNext()) {
JAXBElement<?> jaxbElement = (JAXBElement<?>) iterator.next();
if (jaxbElement.getValue() instanceof ReferenceType) {
referenceURI = ((ReferenceType) jaxbElement.getValue()).getURI();
}
}
}
if (LOG.isLoggable(Level.FINE)) {
LOG.fine("Reference URI found " + referenceURI);
}
if (referenceURI == null) {
LOG.log(Level.WARNING, "No Reference URI was received");
throw new STSException(
"An unknown element was received", STSException.BAD_REQUEST
);
}
// Find processed token corresponding to the URI
referenceURI = XMLUtils.getIDFromReference(referenceURI);
final List<WSHandlerResult> handlerResults =
CastUtils.cast((List<?>) messageContext.get(WSHandlerConstants.RECV_RESULTS));
if (handlerResults != null && !handlerResults.isEmpty()) {
WSHandlerResult handlerResult = handlerResults.get(0);
List<WSSecurityEngineResult> engineResults = handlerResult.getResults();
for (WSSecurityEngineResult engineResult : engineResults) {
Integer actInt = (Integer)engineResult.get(WSSecurityEngineResult.TAG_ACTION);
String id = (String)engineResult.get(WSSecurityEngineResult.TAG_ID);
if (referenceURI.equals(id)) {
Element tokenElement =
(Element)engineResult.get(WSSecurityEngineResult.TAG_TOKEN_ELEMENT);
if (tokenElement == null) {
throw new STSException(
"Cannot retrieve token from reference", STSException.INVALID_REQUEST
);
}
return tokenElement;
} else if (actInt == WSConstants.SCT) {
// Need to check special case of SecurityContextToken Identifier separately
SecurityContextToken sct =
(SecurityContextToken)
engineResult.get(WSSecurityEngineResult.TAG_SECURITY_CONTEXT_TOKEN);
if (referenceURI.equals(sct.getIdentifier())) {
return sct.getElement();
}
}
}
}
throw new STSException("Cannot retreive token from reference", STSException.REQUEST_FAILED);
}
    /**
     * Returns the allowCustomContent flag. NOTE(review): based on the field name this
     * presumably controls whether non-standard child elements of the request are tolerated
     * by the parser - confirm against the callers of this accessor.
     */
    public boolean isAllowCustomContent() {
        return allowCustomContent;
    }
    /**
     * Sets the allowCustomContent flag (see {@link #isAllowCustomContent()}).
     */
    public void setAllowCustomContent(boolean allowCustomContent) {
        this.allowCustomContent = allowCustomContent;
    }
}
|
apache/flink | 36,708 | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/batch/sql/AnalyzeTableITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.batch.sql;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.catalog.Catalog;
import org.apache.flink.table.catalog.CatalogPartitionImpl;
import org.apache.flink.table.catalog.CatalogPartitionSpec;
import org.apache.flink.table.catalog.ObjectPath;
import org.apache.flink.table.catalog.exceptions.TableNotExistException;
import org.apache.flink.table.catalog.stats.CatalogColumnStatistics;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataBase;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataBoolean;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDate;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataDouble;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataLong;
import org.apache.flink.table.catalog.stats.CatalogColumnStatisticsDataString;
import org.apache.flink.table.catalog.stats.CatalogTableStatistics;
import org.apache.flink.table.catalog.stats.Date;
import org.apache.flink.table.planner.factories.TestValuesCatalog;
import org.apache.flink.table.planner.factories.TestValuesTableFactory;
import org.apache.flink.table.planner.runtime.utils.BatchTestBase;
import org.apache.flink.table.planner.runtime.utils.TestData;
import org.apache.flink.util.Preconditions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
/** Tests for `ANALYZE TABLE`. */
public class AnalyzeTableITCase extends BatchTestBase {
private TableEnvironment tEnv;
    /**
     * Registers a TestValuesCatalog ("cat"/"db") and creates the tables exercised by the
     * tests: a non-partitioned table covering all supported column types, a partitioned
     * table with pre-created partitions, and two tables carrying metadata/computed columns.
     */
    @BeforeEach
    @Override
    public void before() throws Exception {
        super.before();
        tEnv = tEnv();
        // Statistics support is enabled (third constructor argument)
        Catalog catalog = new TestValuesCatalog("cat", "db", true);
        tEnv.registerCatalog("cat", catalog);
        tEnv.useCatalog("cat");
        tEnv.useDatabase("db");
        // Non-partitioned table covering every column type the ANALYZE command supports
        String dataId1 = TestValuesTableFactory.registerData(TestData.fullDataTypesData());
        tEnv.executeSql(
                String.format(
                        "CREATE TABLE NonPartitionTable (\n"
                                + " `a` BOOLEAN,\n"
                                + " `b` TINYINT,\n"
                                + " `c` SMALLINT,\n"
                                + " `d` INT,\n"
                                + " `e` BIGINT,\n"
                                + " `f` FLOAT,\n"
                                + " `g` DOUBLE,\n"
                                + " `h` DECIMAL(5, 2),\n"
                                + " `x` DECIMAL(30, 10),\n"
                                + " `i` VARCHAR(5),\n"
                                + " `j` CHAR(5),\n"
                                + " `k` DATE,\n"
                                + " `l` TIME(0),\n"
                                + " `m` TIMESTAMP(9),\n"
                                + " `n` TIMESTAMP(9) WITH LOCAL TIME ZONE,\n"
                                + " `o` ARRAY<BIGINT>,\n"
                                + " `p` ROW<f1 BIGINT, f2 STRING, f3 DOUBLE>,\n"
                                + " `q` MAP<STRING, INT>\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'data-id' = '%s',\n"
                                + " 'bounded' = 'true'\n"
                                + ")",
                        dataId1));
        // Partitioned table; partitions are declared in 'partition-list' and also created
        // in the catalog below so partition statistics can be stored
        String dataId2 = TestValuesTableFactory.registerData(TestData.data5());
        tEnv.executeSql(
                String.format(
                        "CREATE TABLE PartitionTable (\n"
                                + " `a` INT,\n"
                                + " `b` BIGINT,\n"
                                + " `c` INT,\n"
                                + " `d` VARCHAR,\n"
                                + " `e` BIGINT\n"
                                + ") partitioned by (e, a)\n"
                                + " WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'partition-list' = 'e:1,a:1;e:1,a:2;e:1,a:4;e:1,a:5;e:2,a:2;e:2,a:3;e:2,a:4;e:2,a:5;e:3,a:3;e:3,a:5;',\n"
                                + " 'data-id' = '%s',\n"
                                + " 'disable-lookup' = 'true',\n"
                                + " 'bounded' = 'true'\n"
                                + ")",
                        dataId2));
        createPartition(catalog, "db", "PartitionTable", "e=1,a=1");
        createPartition(catalog, "db", "PartitionTable", "e=1,a=2");
        createPartition(catalog, "db", "PartitionTable", "e=1,a=4");
        createPartition(catalog, "db", "PartitionTable", "e=1,a=5");
        createPartition(catalog, "db", "PartitionTable", "e=2,a=2");
        createPartition(catalog, "db", "PartitionTable", "e=2,a=3");
        createPartition(catalog, "db", "PartitionTable", "e=2,a=4");
        createPartition(catalog, "db", "PartitionTable", "e=2,a=5");
        createPartition(catalog, "db", "PartitionTable", "e=3,a=3");
        createPartition(catalog, "db", "PartitionTable", "e=3,a=5");
        // Non-partitioned table with metadata columns (d virtual, e persisted) and a
        // computed column f - used to test that ANALYZE rejects such columns
        String dataId3 = TestValuesTableFactory.registerData(TestData.smallData5());
        tEnv.executeSql(
                String.format(
                        "CREATE TABLE NonPartitionTable2 (\n"
                                + " `a` INT,\n"
                                + " `b` BIGINT,\n"
                                + " `c` INT,\n"
                                + " `d` VARCHAR METADATA VIRTUAL,\n"
                                + " `e` BIGINT METADATA,"
                                + " `f` as a + 1\n"
                                + ") WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'data-id' = '%s',\n"
                                + " 'disable-lookup' = 'true',\n"
                                + " 'readable-metadata'='d:varchar,e:bigint',\n"
                                + " 'bounded' = 'true'\n"
                                + ")",
                        dataId3));
        // Partitioned variant of the metadata/computed-column table
        String dataId4 = TestValuesTableFactory.registerData(TestData.smallData5());
        tEnv.executeSql(
                String.format(
                        "CREATE TABLE PartitionTable2 (\n"
                                + " `a` INT,\n"
                                + " `b` BIGINT,\n"
                                + " `c` INT,\n"
                                + " `d` VARCHAR METADATA VIRTUAL,\n"
                                + " `e` BIGINT METADATA,"
                                + " `f` as a + 1\n"
                                + ") partitioned by (a)\n"
                                + " WITH (\n"
                                + " 'connector' = 'values',\n"
                                + " 'partition-list' = 'a:1;a:2;',\n"
                                + " 'data-id' = '%s',\n"
                                + " 'disable-lookup' = 'true',\n"
                                + " 'readable-metadata'='d:varchar,e:bigint',\n"
                                + " 'bounded' = 'true'\n"
                                + ")",
                        dataId4));
        createPartition(catalog, "db", "PartitionTable2", "a=1");
        createPartition(catalog, "db", "PartitionTable2", "a=2");
    }
private void createPartition(Catalog catalog, String db, String table, String partitionSpecs)
throws Exception {
catalog.createPartition(
new ObjectPath(db, table),
createCatalogPartitionSpec(partitionSpecs),
new CatalogPartitionImpl(new HashMap<>(), ""),
false);
}
private CatalogPartitionSpec createCatalogPartitionSpec(String partitionSpecs) {
Map<String, String> partitionSpec = new HashMap<>();
for (String partition : partitionSpecs.split(",")) {
String[] items = partition.split("=");
Preconditions.checkArgument(
items.length == 2, "Partition key value should be joined with '='");
partitionSpec.put(items[0], items[1]);
}
return new CatalogPartitionSpec(partitionSpec);
}
@Test
public void testNonPartitionTableWithoutTableNotExisted() {
assertThatThrownBy(
() -> tEnv.executeSql("analyze table not_exist_table compute statistics"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining("Table `cat`.`db`.`not_exist_table` doesn't exist");
}
@Test
public void testNonPartitionTableWithoutColumns() throws Exception {
tEnv.executeSql("analyze table NonPartitionTable compute statistics");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "NonPartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(5L, -1, -1L, -1L));
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path))
.isEqualTo(new CatalogColumnStatistics(new HashMap<>()));
}
@Test
public void testNonPartitionTableWithColumnsNotExisted() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table NonPartitionTable compute statistics for columns not_existed_column"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: not_existed_column does not exist in the table: `cat`.`db`.`NonPartitionTable`");
}
@Test
public void testNonPartitionTableWithComputeColumn() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table NonPartitionTable2 compute statistics for columns f"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: f is a computed column, ANALYZE TABLE does not support computed column");
}
@Test
public void testNonPartitionTableWithVirtualMetadataColumn() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table NonPartitionTable2 compute statistics for columns d"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: d is a metadata column, ANALYZE TABLE does not support metadata column");
}
@Test
public void testNonPartitionTableWithMetadataColumn() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table NonPartitionTable2 compute statistics for columns e"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: e is a metadata column, ANALYZE TABLE does not support metadata column");
}
@Test
public void testNonPartitionTableWithPartition() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table NonPartitionTable PARTITION(a) compute statistics"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Invalid ANALYZE TABLE statement. Table: `cat`.`db`.`NonPartitionTable` is not a partition table, while partition values are given");
}
    /**
     * Analyzing a subset of columns stores statistics only for those columns, while the
     * table-level row count is always updated.
     */
    @Test
    public void testNonPartitionTableWithPartialColumns() throws Exception {
        tEnv.executeSql("analyze table NonPartitionTable compute statistics for columns f, a, d");
        ObjectPath path1 = new ObjectPath(tEnv.getCurrentDatabase(), "NonPartitionTable");
        assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path1))
                .isEqualTo(new CatalogTableStatistics(5L, -1, -1L, -1L));
        // Only the three requested columns get column statistics
        Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData1 = new HashMap<>();
        columnStatisticsData1.put("a", new CatalogColumnStatisticsDataBoolean(2L, 2L, 1L));
        columnStatisticsData1.put(
                "f", new CatalogColumnStatisticsDataDouble(-1.123d, 3.4d, 4L, 1L));
        columnStatisticsData1.put(
                "d",
                new CatalogColumnStatisticsDataLong(
                        (long) Integer.MIN_VALUE, (long) Integer.MAX_VALUE, 4L, 1L));
        assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path1))
                .isEqualTo(new CatalogColumnStatistics(columnStatisticsData1));
        // Same behaviour on the table with metadata/computed columns (physical columns only)
        tEnv.executeSql("analyze table NonPartitionTable2 compute statistics for columns a, b, c");
        ObjectPath path2 = new ObjectPath(tEnv.getCurrentDatabase(), "NonPartitionTable2");
        Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData2 = new HashMap<>();
        columnStatisticsData2.put("a", new CatalogColumnStatisticsDataLong(1L, 2L, 2L, 0L));
        columnStatisticsData2.put("b", new CatalogColumnStatisticsDataLong(1L, 3L, 3L, 0L));
        columnStatisticsData2.put("c", new CatalogColumnStatisticsDataLong(0L, 2L, 3L, 0L));
        assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path2))
                .isEqualTo(new CatalogColumnStatistics(columnStatisticsData2));
    }
    /**
     * "FOR ALL COLUMNS" computes statistics for every supported physical column type;
     * complex types (array/row/map) get no statistics entry.
     */
    @Test
    public void testNonPartitionTableWithAllColumns() throws Exception {
        tEnv.executeSql("analyze table NonPartitionTable compute statistics for all columns");
        ObjectPath path1 = new ObjectPath(tEnv.getCurrentDatabase(), "NonPartitionTable");
        assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path1))
                .isEqualTo(new CatalogTableStatistics(5L, -1, -1L, -1L));
        Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData1 = new HashMap<>();
        // boolean
        columnStatisticsData1.put("a", new CatalogColumnStatisticsDataBoolean(2L, 2L, 1L));
        // byte
        columnStatisticsData1.put(
                "b",
                new CatalogColumnStatisticsDataLong(
                        (long) Byte.MIN_VALUE, (long) Byte.MAX_VALUE, 4L, 1L));
        // short
        columnStatisticsData1.put(
                "c",
                new CatalogColumnStatisticsDataLong(
                        (long) Short.MIN_VALUE, (long) Short.MAX_VALUE, 4L, 1L));
        // int
        columnStatisticsData1.put(
                "d",
                new CatalogColumnStatisticsDataLong(
                        (long) Integer.MIN_VALUE, (long) Integer.MAX_VALUE, 4L, 1L));
        // long
        columnStatisticsData1.put(
                "e", new CatalogColumnStatisticsDataLong(Long.MIN_VALUE, Long.MAX_VALUE, 4L, 1L));
        // float
        columnStatisticsData1.put(
                "f", new CatalogColumnStatisticsDataDouble(-1.123d, 3.4d, 4L, 1L));
        // double
        columnStatisticsData1.put(
                "g", new CatalogColumnStatisticsDataDouble(-1.123d, 3.4d, 4L, 1L));
        // DECIMAL(5, 2)
        columnStatisticsData1.put("h", new CatalogColumnStatisticsDataDouble(5.1d, 8.12d, 4L, 1L));
        // DECIMAL(30, 10)
        columnStatisticsData1.put(
                "x",
                new CatalogColumnStatisticsDataDouble(
                        1234567891012345.1d, 812345678910123451.0123456789d, 4L, 1L));
        // varchar
        columnStatisticsData1.put("i", new CatalogColumnStatisticsDataString(4L, 2.5d, 4L, 1L));
        // char
        columnStatisticsData1.put("j", new CatalogColumnStatisticsDataString(4L, 2.5d, 4L, 1L));
        // date
        columnStatisticsData1.put(
                "k", new CatalogColumnStatisticsDataDate(new Date(-365), new Date(18383), 4L, 1L));
        // time
        columnStatisticsData1.put(
                "l", new CatalogColumnStatisticsDataLong(123000000L, 84203000000000L, 4L, 1L));
        // timestamp
        columnStatisticsData1.put(
                "m", new CatalogColumnStatisticsDataLong(-31536000L, 1588375403L, 4L, 1L));
        // timestamp with local timezone
        columnStatisticsData1.put(
                "n", new CatalogColumnStatisticsDataLong(-31535999877L, 1588375403000L, 4L, 1L));
        // note: columns o/p/q (array/row/map) intentionally have no statistics
        assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path1))
                .isEqualTo(new CatalogColumnStatistics(columnStatisticsData1));
        // Metadata/computed columns of NonPartitionTable2 are skipped by "all columns"
        tEnv.executeSql("analyze table NonPartitionTable2 compute statistics for all columns");
        ObjectPath path2 = new ObjectPath(tEnv.getCurrentDatabase(), "NonPartitionTable2");
        Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData2 = new HashMap<>();
        columnStatisticsData2.put("a", new CatalogColumnStatisticsDataLong(1L, 2L, 2L, 0L));
        columnStatisticsData2.put("b", new CatalogColumnStatisticsDataLong(1L, 3L, 3L, 0L));
        columnStatisticsData2.put("c", new CatalogColumnStatisticsDataLong(0L, 2L, 3L, 0L));
        assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path2))
                .isEqualTo(new CatalogColumnStatistics(columnStatisticsData2));
    }
@Test
public void testNonPartitionTableAnalyzePartialColumnsWithSomeColumnsHaveColumnStats()
throws TableNotExistException {
// If some columns have table column stats, analyze table for partial columns will merge
// these exist columns stats instead of covering it.
// Adding column stats to partial columns.
tEnv.executeSql("analyze table NonPartitionTable compute statistics for columns f, a, d");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "NonPartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(5L, -1, -1L, -1L));
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData = new HashMap<>();
columnStatisticsData.put("a", new CatalogColumnStatisticsDataBoolean(2L, 2L, 1L));
columnStatisticsData.put("f", new CatalogColumnStatisticsDataDouble(-1.123d, 3.4d, 4L, 1L));
columnStatisticsData.put(
"d",
new CatalogColumnStatisticsDataLong(
(long) Integer.MIN_VALUE, (long) Integer.MAX_VALUE, 4L, 1L));
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path))
.isEqualTo(new CatalogColumnStatistics(columnStatisticsData));
// Analyze different column sets.
tEnv.executeSql("analyze table NonPartitionTable compute statistics for columns d, e");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(5L, -1, -1L, -1L));
columnStatisticsData.put(
"e", new CatalogColumnStatisticsDataLong(Long.MIN_VALUE, Long.MAX_VALUE, 4L, 1L));
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path))
.isEqualTo(new CatalogColumnStatistics(columnStatisticsData));
}
@Test
public void testPartitionTableWithoutPartition() {
assertThatThrownBy(() -> tEnv.executeSql("analyze table PartitionTable compute statistics"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Invalid ANALYZE TABLE statement. For partition table, all partition keys should be specified explicitly. "
+ "The given partition keys: [] are not match the target partition keys: [e,a]");
}
@Test
public void testPartitionTableWithPartitionKeyNotExisted() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table PartitionTable PARTITION(d) compute statistics"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Invalid ANALYZE TABLE statement. For partition table, all partition keys should be specified explicitly. "
+ "The given partition keys: [d] are not match the target partition keys: [e,a]");
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table PartitionTable PARTITION(e=1) compute statistics"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Invalid ANALYZE TABLE statement. For partition table, all partition keys should be specified explicitly. "
+ "The given partition keys: [e] are not match the target partition keys: [e,a]");
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table PartitionTable PARTITION(e=1,d) compute statistics"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Invalid ANALYZE TABLE statement. For partition table, all partition keys should be specified explicitly. "
+ "The given partition keys: [e,d] are not match the target partition keys: [e,a]");
}
@Test
public void testPartitionTableWithPartitionValueNotExisted() throws Exception {
tEnv.executeSql("analyze table PartitionTable partition(e=10,a) compute statistics");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "PartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path))
.isEqualTo(new CatalogColumnStatistics(new HashMap<>()));
}
@Test
public void testPartitionTableWithColumnsNotExisted() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table PartitionTable partition(e, a) compute statistics for columns not_existed_column"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: not_existed_column does not exist in the table: `cat`.`db`.`PartitionTable`");
}
@Test
public void testPartitionTableWithVirtualMetadataColumn() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table PartitionTable2 PARTITION(a) compute statistics for columns d"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: d is a metadata column, ANALYZE TABLE does not support metadata column");
}
@Test
public void testPartitionTableWithMetadataColumn() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table PartitionTable2 PARTITION(a) compute statistics for columns e"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: e is a metadata column, ANALYZE TABLE does not support metadata column");
}
@Test
public void testPartitionTableWithComputeColumn() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table PartitionTable2 PARTITION(a) compute statistics for columns f"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Column: f is a computed column, ANALYZE TABLE does not support computed column");
}
@Test
public void testPartitionTableWithPartition() {
assertThatThrownBy(
() ->
tEnv.executeSql(
"analyze table NonPartitionTable PARTITION(a) compute statistics"))
.isInstanceOf(ValidationException.class)
.hasMessageContaining(
"Invalid ANALYZE TABLE statement. Table: `cat`.`db`.`NonPartitionTable` is not a partition table, while partition values are given");
}
@Test
public void testPartitionTableWithoutColumns() throws Exception {
// Strict order is not required
tEnv.executeSql("analyze table PartitionTable partition(a, e) compute statistics");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "PartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
assertPartitionStatistics(path, "e=1,a=1", 1L);
assertPartitionStatistics(path, "e=1,a=2", 1L);
assertPartitionStatistics(path, "e=1,a=4", 2L);
assertPartitionStatistics(path, "e=1,a=5", 1L);
assertPartitionStatistics(path, "e=2,a=2", 1L);
assertPartitionStatistics(path, "e=2,a=3", 2L);
assertPartitionStatistics(path, "e=2,a=4", 2L);
assertPartitionStatistics(path, "e=2,a=5", 2L);
assertPartitionStatistics(path, "e=3,a=3", 1L);
assertPartitionStatistics(path, "e=3,a=5", 2L);
tEnv.executeSql(
"analyze table PartitionTable2 partition(a) compute statistics for all columns");
ObjectPath path2 = new ObjectPath(tEnv.getCurrentDatabase(), "PartitionTable2");
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData2 = new HashMap<>();
columnStatisticsData2.put("a", new CatalogColumnStatisticsDataLong(1L, 1L, 1L, 0L));
columnStatisticsData2.put("b", new CatalogColumnStatisticsDataLong(1L, 1L, 1L, 0L));
columnStatisticsData2.put("c", new CatalogColumnStatisticsDataLong(0L, 0L, 1L, 0L));
assertPartitionStatistics(
path2, "a=1", 1L, new CatalogColumnStatistics(columnStatisticsData2));
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData3 = new HashMap<>();
columnStatisticsData3.put("a", new CatalogColumnStatisticsDataLong(2L, 2L, 1L, 0L));
columnStatisticsData3.put("b", new CatalogColumnStatisticsDataLong(2L, 3L, 2L, 0L));
columnStatisticsData3.put("c", new CatalogColumnStatisticsDataLong(1L, 2L, 2L, 0L));
assertPartitionStatistics(
path2, "a=2", 2L, new CatalogColumnStatistics(columnStatisticsData3));
}
@Test
public void testPartitionTableWithFullPartitionPath() throws Exception {
tEnv.executeSql(
"analyze table PartitionTable partition(e=2, a=5) compute statistics for all columns");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "PartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
assertPartitionStatistics(path, "e=1,a=1", -1L);
assertPartitionStatistics(path, "e=1,a=2", -1L);
assertPartitionStatistics(path, "e=1,a=4", -1L);
assertPartitionStatistics(path, "e=1,a=5", -1L);
assertPartitionStatistics(path, "e=2,a=2", -1L);
assertPartitionStatistics(path, "e=2,a=3", -1L);
assertPartitionStatistics(path, "e=2,a=4", -1L);
assertPartitionStatistics(path, "e=3,a=3", -1L);
assertPartitionStatistics(path, "e=3,a=5", -1L);
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData = new HashMap<>();
columnStatisticsData.put("a", new CatalogColumnStatisticsDataLong(5L, 5L, 1L, 0L));
columnStatisticsData.put("b", new CatalogColumnStatisticsDataLong(14L, 15L, 2L, 0L));
columnStatisticsData.put("c", new CatalogColumnStatisticsDataLong(13L, 14L, 2L, 0L));
columnStatisticsData.put("d", new CatalogColumnStatisticsDataString(3L, 3.0, 2L, 0L));
columnStatisticsData.put("e", new CatalogColumnStatisticsDataLong(2L, 2L, 1L, 0L));
assertPartitionStatistics(
path, "e=2,a=5", 2L, new CatalogColumnStatistics(columnStatisticsData));
}
@Test
public void testPartitionTableWithPartialPartitionPath() throws Exception {
tEnv.executeSql("analyze table PartitionTable partition(e=2, a) compute statistics");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "PartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
assertPartitionStatistics(path, "e=1,a=1", -1L);
assertPartitionStatistics(path, "e=1,a=2", -1L);
assertPartitionStatistics(path, "e=1,a=4", -1L);
assertPartitionStatistics(path, "e=1,a=5", -1L);
assertPartitionStatistics(path, "e=2,a=2", 1L);
assertPartitionStatistics(path, "e=2,a=3", 2L);
assertPartitionStatistics(path, "e=2,a=4", 2L);
assertPartitionStatistics(path, "e=2,a=5", 2L);
assertPartitionStatistics(path, "e=3,a=3", -1L);
assertPartitionStatistics(path, "e=3,a=5", -1L);
}
@Test
public void testPartitionTableAnalyzePartialColumnsWithSomeColumnsHaveColumnStats()
throws Exception {
// If some columns have table column stats, analyze table for partial columns will merge
// these exist columns stats instead of covering it.
// Adding column stats to partial columns.
tEnv.executeSql(
"analyze table PartitionTable partition(e=2, a=5) compute statistics for columns a, b, c");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "PartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData = new HashMap<>();
columnStatisticsData.put("a", new CatalogColumnStatisticsDataLong(5L, 5L, 1L, 0L));
columnStatisticsData.put("b", new CatalogColumnStatisticsDataLong(14L, 15L, 2L, 0L));
columnStatisticsData.put("c", new CatalogColumnStatisticsDataLong(13L, 14L, 2L, 0L));
assertPartitionStatistics(
path, "e=2,a=5", 2L, new CatalogColumnStatistics(columnStatisticsData));
tEnv.executeSql(
"analyze table PartitionTable partition(e=2, a=5) compute statistics for columns c, d");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
columnStatisticsData.put("d", new CatalogColumnStatisticsDataString(3L, 3.0, 2L, 0L));
assertPartitionStatistics(
path, "e=2,a=5", 2L, new CatalogColumnStatistics(columnStatisticsData));
}
@Test
public void testPartitionTableAnalyzePartialPartitionWithSomePartitionHaveColumnStats()
throws Exception {
// For different partitions, their column stats are isolated and should not affect each
// other.
// Adding column stats to one partition.
tEnv.executeSql(
"analyze table PartitionTable partition(e=2, a=5) compute statistics for columns a, b, c");
ObjectPath path = new ObjectPath(tEnv.getCurrentDatabase(), "PartitionTable");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData1 = new HashMap<>();
columnStatisticsData1.put("a", new CatalogColumnStatisticsDataLong(5L, 5L, 1L, 0L));
columnStatisticsData1.put("b", new CatalogColumnStatisticsDataLong(14L, 15L, 2L, 0L));
columnStatisticsData1.put("c", new CatalogColumnStatisticsDataLong(13L, 14L, 2L, 0L));
assertPartitionStatistics(
path, "e=2,a=5", 2L, new CatalogColumnStatistics(columnStatisticsData1));
// Adding column stats to another partition.
tEnv.executeSql(
"analyze table PartitionTable partition(e=2, a=4) compute statistics for columns a, d");
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableStatistics(path))
.isEqualTo(new CatalogTableStatistics(-1L, -1, -1L, -1L));
// origin analyze partition.
assertPartitionStatistics(
path, "e=2,a=5", 2L, new CatalogColumnStatistics(columnStatisticsData1));
Map<String, CatalogColumnStatisticsDataBase> columnStatisticsData2 = new HashMap<>();
columnStatisticsData2.put("a", new CatalogColumnStatisticsDataLong(4L, 4L, 1L, 0L));
columnStatisticsData2.put("d", new CatalogColumnStatisticsDataString(3L, 3.0, 2L, 0L));
// new analyze partition.
assertPartitionStatistics(
path, "e=2,a=4", 2L, new CatalogColumnStatistics(columnStatisticsData2));
}
private void assertPartitionStatistics(ObjectPath path, String partitionSpec, long rowCount)
throws Exception {
CatalogPartitionSpec spec = createCatalogPartitionSpec(partitionSpec);
assertThat(
tEnv.getCatalog(tEnv.getCurrentCatalog())
.get()
.getPartitionStatistics(path, spec))
.isEqualTo(new CatalogTableStatistics(rowCount, -1, -1L, -1L));
assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path))
.isEqualTo(new CatalogColumnStatistics(new HashMap<>()));
assertThat(
tEnv.getCatalog(tEnv.getCurrentCatalog())
.get()
.getPartitionColumnStatistics(path, spec))
.isEqualTo(new CatalogColumnStatistics(new HashMap<>()));
}
    /**
     * Asserts the stored statistics for one partition: the expected row count, that the
     * table-level column statistics remain empty, and the expected partition-level
     * column statistics.
     *
     * @param path the table to inspect
     * @param partitionSpec partition spec in {@code k1=v1,k2=v2} form
     * @param rowCount expected partition row count ({@code -1} means unknown)
     * @param columnStats expected partition-level column statistics
     */
    private void assertPartitionStatistics(
            ObjectPath path,
            String partitionSpec,
            long rowCount,
            CatalogColumnStatistics columnStats)
            throws Exception {
        CatalogPartitionSpec spec = createCatalogPartitionSpec(partitionSpec);
        // partition-level row count.
        assertThat(
                        tEnv.getCatalog(tEnv.getCurrentCatalog())
                                .get()
                                .getPartitionStatistics(path, spec))
                .isEqualTo(new CatalogTableStatistics(rowCount, -1, -1L, -1L));
        // table-level column stats must stay empty for partitioned tables.
        assertThat(tEnv.getCatalog(tEnv.getCurrentCatalog()).get().getTableColumnStatistics(path))
                .isEqualTo(new CatalogColumnStatistics(new HashMap<>()));
        // partition-level column stats.
        assertThat(
                        tEnv.getCatalog(tEnv.getCurrentCatalog())
                                .get()
                                .getPartitionColumnStatistics(path, spec))
                .isEqualTo(columnStats);
    }
}
|
apache/inlong | 37,006 | inlong-tubemq/tubemq-server/src/main/java/org/apache/inlong/tubemq/server/broker/msgstore/MessageStoreManager.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.inlong.tubemq.server.broker.msgstore;
import org.apache.inlong.tubemq.corebase.TBaseConstants;
import org.apache.inlong.tubemq.corebase.TErrCodeConstants;
import org.apache.inlong.tubemq.corebase.utils.TStringUtils;
import org.apache.inlong.tubemq.corebase.utils.ThreadUtils;
import org.apache.inlong.tubemq.server.broker.BrokerConfig;
import org.apache.inlong.tubemq.server.broker.TubeBroker;
import org.apache.inlong.tubemq.server.broker.exception.StartupException;
import org.apache.inlong.tubemq.server.broker.metadata.MetadataManager;
import org.apache.inlong.tubemq.server.broker.metadata.TopicMetadata;
import org.apache.inlong.tubemq.server.broker.msgstore.disk.GetMessageResult;
import org.apache.inlong.tubemq.server.broker.nodeinfo.ConsumerNodeInfo;
import org.apache.inlong.tubemq.server.broker.offset.topicpub.TopicPubInfo;
import org.apache.inlong.tubemq.server.broker.utils.DataStoreUtils;
import org.apache.inlong.tubemq.server.broker.utils.TopicPubStoreInfo;
import org.apache.inlong.tubemq.server.common.TStatusConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionService;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorCompletionService;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Message storage management. It contains all message stores of the topics on this broker,
 * and is in charge of store, expire, and flush operations.
 */
public class MessageStoreManager implements StoreService {
    private static final Logger logger = LoggerFactory.getLogger(MessageStoreManager.class);
    // broker configuration.
    private final BrokerConfig tubeConfig;
    // the broker instance that owns this manager.
    private final TubeBroker tubeBroker;
    // metadata manager, get metadata from master.
    private final MetadataManager metadataManager;
    // storeId to store on each topic.
    private final ConcurrentHashMap<String/* topic */, ConcurrentHashMap<Integer/* storeId */, MessageStore>> dataStores =
            new ConcurrentHashMap<>();
    // store service status
    private final AtomicBoolean stopped = new AtomicBoolean(false);
    // data expire operation scheduler.
    private final ScheduledExecutorService logClearScheduler;
    // flush operation scheduler.
    private final ScheduledExecutorService unFlushDiskScheduler;
    // message on memory sink to disk operation scheduler.
    private final ScheduledExecutorService unFlushMemScheduler;
    // max transfer size.
    private final int maxMsgTransferSize;
    // the status that is deleting topic.
    private final AtomicBoolean isRemovingTopic = new AtomicBoolean(false);
    /**
     * Initialize the message-store manager: wire it to the broker, cap the transfer
     * size, register for topic-configuration changes, and create (but do not start)
     * the three single-threaded background schedulers.
     *
     * @param tubeBroker the broker instance
     * @param tubeConfig the initial configure
     * @throws IOException the exception during processing
     */
    public MessageStoreManager(final TubeBroker tubeBroker,
                               final BrokerConfig tubeConfig) throws IOException {
        super();
        this.tubeConfig = tubeConfig;
        this.tubeBroker = tubeBroker;
        this.metadataManager = this.tubeBroker.getMetadataManager();
        this.isRemovingTopic.set(false);
        // never transfer more than the hard limit, whatever the config says.
        this.maxMsgTransferSize =
                Math.min(tubeConfig.getTransferSize(), DataStoreUtils.MAX_MSG_TRANSFER_SIZE);
        // refresh the held stores whenever the master pushes a new topic configuration.
        this.metadataManager.addPropertyChangeListener("topicConfigMap", new PropertyChangeListener() {
            @Override
            public void propertyChange(final PropertyChangeEvent evt) {
                // NOTE(review): unchecked casts - the event is assumed to carry
                // Map<String, TopicMetadata> values; confirm against MetadataManager.
                Map<String, TopicMetadata> oldTopicConfigMap = (Map<String, TopicMetadata>) evt.getOldValue();
                Map<String, TopicMetadata> newTopicConfigMap = (Map<String, TopicMetadata>) evt.getNewValue();
                MessageStoreManager.this.refreshMessageStoresHoldVals(oldTopicConfigMap, newTopicConfigMap);
            }
        });
        // three dedicated single-thread schedulers; tasks are submitted in start().
        this.logClearScheduler =
                Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "Broker Log Clear Thread");
                    }
                });
        this.unFlushDiskScheduler =
                Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "Broker Log Disk Flush Thread");
                    }
                });
        this.unFlushMemScheduler =
                Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        return new Thread(r, "Broker Log Mem Flush Thread");
                    }
                });
    }
    /**
     * Start the store service: load all existing message stores from disk, then
     * schedule the periodic log-clear, disk-flush and memory-flush tasks.
     */
    @Override
    public void start() {
        try {
            // scan the configured log directories and load every topic store found there.
            this.loadMessageStores(this.tubeConfig);
        } catch (final IOException e) {
            logger.error("[Store Manager] load message stores failed", e);
            throw new StartupException("Initialize message store manager failed", e);
        } catch (InterruptedException e) {
            // NOTE(review): the interrupt flag is restored but startup still proceeds
            // to schedule the background tasks below - confirm this is intended.
            Thread.currentThread().interrupt();
        }
        this.logClearScheduler.scheduleWithFixedDelay(new LogClearRunner(),
                tubeConfig.getLogClearupDurationMs(),
                tubeConfig.getLogClearupDurationMs(),
                TimeUnit.MILLISECONDS);
        this.unFlushDiskScheduler.scheduleWithFixedDelay(new DiskUnFlushRunner(),
                tubeConfig.getLogFlushDiskDurMs(),
                tubeConfig.getLogFlushDiskDurMs(),
                TimeUnit.MILLISECONDS);
        this.unFlushMemScheduler.scheduleWithFixedDelay(new MemUnFlushRunner(),
                tubeConfig.getLogFlushMemDurMs(),
                tubeConfig.getLogFlushMemDurMs(),
                TimeUnit.MILLISECONDS);
    }
@Override
public void close() {
if (this.stopped.get()) {
return;
}
if (this.stopped.compareAndSet(false, true)) {
logger.info("[Store Manager] begin close store manager......");
this.logClearScheduler.shutdownNow();
this.unFlushDiskScheduler.shutdownNow();
this.unFlushMemScheduler.shutdownNow();
for (Map.Entry<String, ConcurrentHashMap<Integer, MessageStore>> entry : this.dataStores.entrySet()) {
if (entry.getValue() != null) {
ConcurrentHashMap<Integer, MessageStore> subMap = entry.getValue();
for (Map.Entry<Integer, MessageStore> subEntry : subMap.entrySet()) {
if (subEntry.getValue() != null) {
try {
subEntry.getValue().close();
} catch (final Throwable e) {
logger.error(new StringBuilder(512)
.append("[Store Manager] Try to run close ")
.append(subEntry.getValue().getStoreKey()).append(" failed").toString(), e);
}
}
}
}
}
this.dataStores.clear();
logger.info("[Store Manager] Store Manager stopped!");
}
}
@Override
public Set<String> removeTopicStore() {
if (isRemovingTopic.get()) {
return null;
}
if (!isRemovingTopic.compareAndSet(false, true)) {
return null;
}
try {
Set<String> removedTopics = new HashSet<>();
Map<String, TopicMetadata> removedTopicMap =
this.metadataManager.getRemovedTopicConfigMap();
if (removedTopicMap.isEmpty()) {
return removedTopics;
}
Set<String> targetTopics = new HashSet<>();
for (Map.Entry<String, TopicMetadata> entry : removedTopicMap.entrySet()) {
if (entry.getKey() == null || entry.getValue() == null) {
continue;
}
if (entry.getValue().getStatusId() == TStatusConstants.STATUS_TOPIC_SOFT_REMOVE) {
targetTopics.add(entry.getKey());
}
}
if (targetTopics.isEmpty()) {
return removedTopics;
}
logger.info("[Remove Topic] start remove topics : {}", targetTopics);
for (String tmpTopic : targetTopics) {
ConcurrentHashMap<Integer, MessageStore> topicStores =
dataStores.get(tmpTopic);
if (topicStores != null) {
Set<Integer> storeIds = topicStores.keySet();
for (Integer storeId : storeIds) {
try {
MessageStore tmpStore = topicStores.remove(storeId);
tmpStore.close();
if (topicStores.isEmpty()) {
this.dataStores.remove(tmpTopic);
}
} catch (Throwable ee) {
logger.error(new StringBuilder(512)
.append("[Remove Topic] Close removed store failure, storeKey=")
.append(tmpTopic).append("-").append(storeId).toString(), ee);
}
}
}
TopicMetadata tmpTopicConf = removedTopicMap.get(tmpTopic);
if (tmpTopicConf != null) {
StringBuilder sBuilder = new StringBuilder(512);
for (int storeId = 0; storeId < tmpTopicConf.getNumTopicStores(); storeId++) {
String storeDir = sBuilder.append(tubeConfig.getPrimaryPath())
.append(File.separator).append(tmpTopic).append("-")
.append(storeId).toString();
sBuilder.delete(0, sBuilder.length());
logger.info("[Remove Topic] remove topic files : {}", storeDir);
try {
delTopicFiles(storeDir);
} catch (Throwable e) {
logger.error("[Remove Topic] remove topic files error : ", e);
}
ThreadUtils.sleep(50);
}
tmpTopicConf.setStatusId(TStatusConstants.STATUS_TOPIC_HARD_REMOVE);
removedTopics.add(tmpTopic);
}
ThreadUtils.sleep(100);
}
logger.info("[Remove Topic] finished remove topics : {}", removedTopics);
return removedTopics;
} finally {
this.isRemovingTopic.set(false);
}
}
/**
* Get message store by topic.
*
* @param topic query topic name
* @return the queried topic's store list
*/
@Override
public Collection<MessageStore> getMessageStoresByTopic(final String topic) {
final ConcurrentHashMap<Integer, MessageStore> map = this.dataStores.get(topic);
if (map == null) {
return Collections.emptyList();
}
return map.values();
}
    /**
     * Get the message store that holds the given partition, creating it lazily
     * (double-checked under a per-topic lock) if it does not exist yet.
     *
     * @param topic the topic name
     * @param partition the partition id (storeId * META_STORE_INS_BASE + in-store partition)
     * @return the message-store instance
     * @throws IllegalArgumentException if the in-store partition id is out of range
     * @throws Throwable any error raised while constructing the store
     */
    @Override
    public MessageStore getOrCreateMessageStore(final String topic,
                                                final int partition) throws Throwable {
        StringBuilder sBuilder = new StringBuilder(512);
        // split the global partition id into (storeId, partition-within-store).
        final int storeId = partition < TBaseConstants.META_STORE_INS_BASE
                ? 0
                : partition / TBaseConstants.META_STORE_INS_BASE;
        int realPartition = partition < TBaseConstants.META_STORE_INS_BASE
                ? partition
                : partition % TBaseConstants.META_STORE_INS_BASE;
        // NOTE(review): locking on an interned String gives a JVM-global lock keyed by
        // this token; any other code interning the same string shares it - confirm.
        final String dataStoreToken = sBuilder.append("tube_store_manager_").append(topic).toString();
        sBuilder.delete(0, sBuilder.length());
        if (realPartition < 0 || realPartition >= this.metadataManager.getNumPartitions(topic)) {
            throw new IllegalArgumentException(sBuilder.append("Wrong partition value ")
                    .append(partition).append(",valid partitions in (0,")
                    .append(this.metadataManager.getNumPartitions(topic) - 1)
                    .append(")").toString());
        }
        // get-or-create the per-topic store map without locking (putIfAbsent races are benign).
        ConcurrentHashMap<Integer, MessageStore> dataMap = dataStores.get(topic);
        if (dataMap == null) {
            ConcurrentHashMap<Integer, MessageStore> tmpTopicMap =
                    new ConcurrentHashMap<>();
            dataMap = this.dataStores.putIfAbsent(topic, tmpTopicMap);
            if (dataMap == null) {
                dataMap = tmpTopicMap;
            }
        }
        MessageStore messageStore = dataMap.get(storeId);
        if (messageStore == null) {
            // double-checked creation: re-read under the lock before constructing.
            synchronized (dataStoreToken.intern()) {
                messageStore = dataMap.get(storeId);
                if (messageStore == null) {
                    TopicMetadata topicMetadata =
                            metadataManager.getTopicMetadata(topic);
                    MessageStore tmpMessageStore =
                            new MessageStore(this, topicMetadata, storeId,
                                    tubeConfig, 0, maxMsgTransferSize);
                    messageStore = dataMap.putIfAbsent(storeId, tmpMessageStore);
                    if (messageStore == null) {
                        messageStore = tmpMessageStore;
                        logger.info(sBuilder
                                .append("[Store Manager] Created a new message storage, storeKey=")
                                .append(topic).append("-").append(storeId).toString());
                    } else {
                        // lost the race: discard the store we built.
                        tmpMessageStore.close();
                    }
                }
            }
        }
        return messageStore;
    }
    /** Returns the broker instance that owns this store manager. */
    public TubeBroker getTubeBroker() {
        return this.tubeBroker;
    }
    /**
     * Read the newest messages of a partition: backs off from the index tail far
     * enough to cover {@code msgCount} messages, then fetches from that offset.
     * Never throws - any failure is converted into an error {@link GetMessageResult}.
     *
     * @param msgStore the message-store
     * @param topic the topic name
     * @param partitionId the partition id
     * @param msgCount the message count to read
     * @param filterCondSet the filter condition set
     * @return the query result (success flag and messages, or an error code/message)
     * @throws IOException declared but in practice all errors are caught below
     */
    public GetMessageResult getMessages(final MessageStore msgStore,
                                        final String topic,
                                        final int partitionId,
                                        final int msgCount,
                                        final Set<String> filterCondSet) throws IOException {
        long requestOffset = 0L;
        try {
            final long maxOffset = msgStore.getIndexMaxOffset();
            // synthetic "visit" consumer identity used only for this ad-hoc read.
            ConsumerNodeInfo consumerNodeInfo =
                    new ConsumerNodeInfo(tubeBroker.getStoreManager(), "visit",
                            "visit", filterCondSet, "", System.currentTimeMillis(), "", "");
            // index bytes to step back: (msgCount + 1) entries per partition of the store.
            int maxIndexReadSize = (msgCount + 1)
                    * DataStoreUtils.STORE_INDEX_HEAD_LEN * msgStore.getPartitionNum();
            if (filterCondSet != null && !filterCondSet.isEmpty()) {
                // widen the scan window when filtering, since matches are sparser.
                maxIndexReadSize *= 5;
            }
            requestOffset = maxOffset - maxIndexReadSize < 0 ? 0L : maxOffset - maxIndexReadSize;
            // NOTE(review): 303 looks like an internal request/status code - confirm
            // its meaning against MessageStore.getMessages().
            return msgStore.getMessages(303, requestOffset, partitionId,
                    consumerNodeInfo, topic, this.maxMsgTransferSize, 0);
        } catch (Throwable e1) {
            return new GetMessageResult(false, TErrCodeConstants.INTERNAL_SERVER_ERROR,
                    requestOffset, 0, "Get message failure, errMsg=" + e1.getMessage());
        }
    }
    /** Returns the broker's metadata manager. */
    public MetadataManager getMetadataManager() {
        return tubeBroker.getMetadataManager();
    }

    /** Returns the effective maximum message transfer size (config capped by hard limit). */
    public int getMaxMsgTransferSize() {
        return maxMsgTransferSize;
    }

    /** Returns an unmodifiable view of all message stores, keyed by topic then storeId. */
    public Map<String, ConcurrentHashMap<Integer, MessageStore>> getMessageStores() {
        return Collections.unmodifiableMap(this.dataStores);
    }
    /**
     * Query topic's publish info.
     *
     * @param topicSet query's topic set; null or empty means all configured topics
     *
     * @return per topic: a map from partition id to that partition's store offsets
     */
    @Override
    public Map<String, Map<Integer, TopicPubStoreInfo>> getTopicPublishInfos(
            Set<String> topicSet) {
        MessageStore store;
        TopicMetadata topicMetadata;
        Set<String> qryTopicSet = new HashSet<>();
        Map<String, Map<Integer, TopicPubStoreInfo>> topicPubStoreInfoMap = new HashMap<>();
        Map<String, TopicMetadata> confTopicInfo = metadataManager.getTopicConfigMap();
        // resolve the topics to query: all configured topics, or the requested
        // subset restricted to topics actually configured on this broker.
        if (topicSet == null || topicSet.isEmpty()) {
            qryTopicSet.addAll(confTopicInfo.keySet());
        } else {
            for (String topic : topicSet) {
                if (confTopicInfo.containsKey(topic)) {
                    qryTopicSet.add(topic);
                }
            }
        }
        if (qryTopicSet.isEmpty()) {
            return topicPubStoreInfoMap;
        }
        Map<Integer, MessageStore> storeMap;
        Map<Integer, TopicPubStoreInfo> storeInfoMap;
        for (String topic : qryTopicSet) {
            topicMetadata = confTopicInfo.get(topic);
            if (topicMetadata == null) {
                continue;
            }
            // topics configured but not yet loaded have no stores to report.
            storeMap = dataStores.get(topic);
            if (storeMap == null) {
                continue;
            }
            storeInfoMap = new HashMap<>();
            for (Map.Entry<Integer, MessageStore> entry : storeMap.entrySet()) {
                if (entry == null
                        || entry.getKey() == null
                        || entry.getValue() == null) {
                    continue;
                }
                store = entry.getValue();
                // one info record per partition hosted by this store.
                for (Integer partitionId : topicMetadata.getPartIdsByStoreId(entry.getKey())) {
                    storeInfoMap.put(partitionId, new TopicPubStoreInfo(topic,
                            entry.getKey(), partitionId, store.getIndexMinOffset(),
                            store.getIndexMaxOffset(), store.getDataMinOffset(),
                            store.getDataMaxOffset()));
                }
            }
            topicPubStoreInfoMap.put(topic, storeInfoMap);
        }
        return topicPubStoreInfoMap;
    }
    /**
     * Query all topic's publish info.
     *
     * @return per topic: the aggregated publish (offset) info across all its stores
     *
     */
    @Override
    public Map<String, TopicPubInfo> getTopicPublishInfos() {
        TopicPubInfo topicPubInfo;
        Map<String, TopicPubInfo> result = new HashMap<>();
        for (Map.Entry<String, ConcurrentHashMap<Integer, MessageStore>> entry : dataStores.entrySet()) {
            // NOTE(review): entrySet() entries are never null; these checks are
            // defensive only.
            if (entry == null
                    || entry.getKey() == null
                    || entry.getValue() == null
                    || entry.getValue().isEmpty()) {
                continue;
            }
            for (Map.Entry<Integer, MessageStore> entry1 : entry.getValue().entrySet()) {
                if (entry1 == null
                        || entry1.getKey() == null
                        || entry1.getValue() == null) {
                    continue;
                }
                // lazily create the per-topic aggregate on first store encountered.
                topicPubInfo = result.get(entry.getKey());
                if (topicPubInfo == null) {
                    topicPubInfo = new TopicPubInfo(entry.getKey());
                    result.put(entry.getKey(), topicPubInfo);
                }
                topicPubInfo.addStorePubInfo(entry1.getKey(), entry1.getValue().getPartitionNum(),
                        entry1.getValue().getIndexMinOffset(), entry1.getValue().getIndexMaxOffset(),
                        entry1.getValue().getDataMinOffset(), entry1.getValue().getDataMaxOffset());
            }
        }
        return result;
    }
/**
 * Collect the set of log directories that message stores are loaded from.
 *
 * <p>Includes the broker's primary path plus every topic-level data-path
 * override, creating missing directories and validating that each one is a
 * readable directory.
 *
 * @param tubeConfig the broker's configure
 * @return the set of existing, readable log directories
 * @throws IOException if a directory cannot be created or is not a readable directory
 */
private Set<File> getLogDirSet(final BrokerConfig tubeConfig) throws IOException {
    final Set<String> paths = new HashSet<>();
    paths.add(tubeConfig.getPrimaryPath());
    for (final String topic : metadataManager.getTopics()) {
        // Topics may override the default data path; collect non-blank overrides only.
        final TopicMetadata topicMetadata = metadataManager.getTopicMetadata(topic);
        if (topicMetadata != null
                && TStringUtils.isNotBlank(topicMetadata.getDataPath())) {
            paths.add(topicMetadata.getDataPath());
        }
    }
    final Set<File> fileSet = new HashSet<>();
    for (final String path : paths) {
        final File dir = new File(path);
        if (!dir.exists() && !dir.mkdirs()) {
            throw new IOException(new StringBuilder(512)
                    .append("Could not make Log directory ")
                    .append(dir.getAbsolutePath()).toString());
        }
        if (!dir.isDirectory() || !dir.canRead()) {
            throw new IOException(new StringBuilder(512).append("Log path ")
                    .append(dir.getAbsolutePath())
                    .append(" is not a readable directory").toString());
        }
        fileSet.add(dir);
    }
    return fileSet;
}
/**
 * Load stores sequential.
 *
 * <p>Scans every log directory for sub-directories named "&lt;topic&gt;-&lt;storeId&gt;",
 * builds one load task per store directory found, then runs the tasks in
 * parallel via {@link #loadStoresInParallel}. Directories without a matching
 * topic configure are skipped with a warning; any load failure aborts startup.
 *
 * @param tubeConfig the broker's configure
 * @throws IOException the exception during processing
 * @throws InterruptedException the exception during processing
 */
private void loadMessageStores(final BrokerConfig tubeConfig)
        throws IOException, InterruptedException {
    StringBuilder sBuilder = new StringBuilder(512);
    logger.info(sBuilder.append("[Store Manager] Begin to load message stores from path ")
            .append(tubeConfig.getPrimaryPath()).toString());
    sBuilder.delete(0, sBuilder.length());
    final long start = System.currentTimeMillis();
    // Shared counters: each task records failures in errCnt and completion in finishCnt.
    final AtomicInteger errCnt = new AtomicInteger(0);
    final AtomicInteger finishCnt = new AtomicInteger(0);
    List<Callable<MessageStore>> tasks = new ArrayList<>();
    for (final File dir : this.getLogDirSet(tubeConfig)) {
        if (dir == null) {
            continue;
        }
        final File[] ls = dir.listFiles();
        if (ls == null) {
            continue;
        }
        for (final File subDir : ls) {
            if (subDir == null) {
                continue;
            }
            if (!subDir.isDirectory()) {
                continue;
            }
            // Store directories are named "<topic>-<storeId>"; split on the last '-'
            // because topic names themselves may contain '-'.
            final String name = subDir.getName();
            final int index = name.lastIndexOf('-');
            if (index < 0) {
                logger.warn(sBuilder.append("[Store Manager] Ignore invalid directory:")
                        .append(subDir.getAbsolutePath()).toString());
                sBuilder.delete(0, sBuilder.length());
                continue;
            }
            final String topic = name.substring(0, index);
            final TopicMetadata topicMetadata = metadataManager.getTopicMetadata(topic);
            if (topicMetadata == null) {
                logger.warn(sBuilder
                        .append("[Store Manager] No valid topic config for topic data directories:")
                        .append(topic).toString());
                sBuilder.delete(0, sBuilder.length());
                continue;
            }
            final int storeId = Integer.parseInt(name.substring(index + 1));
            final MessageStoreManager messageStoreManager = this;
            tasks.add(new Callable<MessageStore>() {
                @Override
                public MessageStore call() throws Exception {
                    MessageStore msgStore = null;
                    try {
                        msgStore = new MessageStore(messageStoreManager,
                                topicMetadata, storeId, tubeConfig, maxMsgTransferSize);
                        // Register the store in dataStores; putIfAbsent handles the race
                        // with other load tasks creating the same topic-level map.
                        ConcurrentHashMap<Integer, MessageStore> map =
                                dataStores.get(msgStore.getTopic());
                        if (map == null) {
                            map = new ConcurrentHashMap<>();
                            ConcurrentHashMap<Integer, MessageStore> oldmap =
                                    dataStores.putIfAbsent(msgStore.getTopic(), map);
                            if (oldmap != null) {
                                map = oldmap;
                            }
                        }
                        MessageStore oldMsgStore = map.putIfAbsent(msgStore.getStoreId(), msgStore);
                        if (oldMsgStore != null) {
                            // Another task already registered this storeId; close the duplicate.
                            try {
                                msgStore.close();
                                logger.info(new StringBuilder(512)
                                        .append("[Store Manager] Close duplicated messageStore ")
                                        .append(msgStore.getStoreKey()).toString());
                            } catch (Throwable e2) {
                                //
                                logger.info("[Store Manager] Close duplicated messageStore failure", e2);
                            }
                        }
                    } catch (Throwable e2) {
                        errCnt.incrementAndGet();
                        logger.error(new StringBuilder(512).append("[Store Manager] Loaded ")
                                .append(subDir.getAbsolutePath())
                                .append("message store failure:").toString(), e2);
                    } finally {
                        finishCnt.incrementAndGet();
                    }
                    return null;
                }
            });
        }
    }
    this.loadStoresInParallel(tasks);
    tasks.clear();
    // Fail fast: a single broken store aborts broker startup so the operator can fix it.
    if (errCnt.get() > 0) {
        throw new RuntimeException(
                "[Store Manager] failure to load message stores, please check load logger and fix first!");
    }
    logger.info(sBuilder.append("[Store Manager] End to load message stores in ")
            .append((System.currentTimeMillis() - start) / 1000).append(" secs").toString());
}
/**
 * Load stores in parallel.
 *
 * <p>Submits all load tasks to a bounded thread pool and waits for each to
 * complete. Task failures are intentionally ignored here because every task
 * records its own errors via the shared error counter; interruption, however,
 * is propagated instead of being silently swallowed (the original caught it
 * under {@code Throwable}, which made the declared {@code InterruptedException}
 * unreachable and left the pool threads alive).
 *
 * @param tasks the load tasks
 * @throws InterruptedException if interrupted while waiting for task completion
 */
private void loadStoresInParallel(List<Callable<MessageStore>> tasks) throws InterruptedException {
    ExecutorService executor =
            Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() + 1);
    try {
        CompletionService<MessageStore> completionService =
                new ExecutorCompletionService<>(executor);
        for (Callable<MessageStore> task : tasks) {
            completionService.submit(task);
        }
        for (int i = 0; i < tasks.size(); i++) {
            try {
                completionService.take().get();
            } catch (InterruptedException e) {
                // Honor the declared contract: stop waiting and propagate.
                throw e;
            } catch (Throwable e) {
                // Ignored: each task catches and counts its own failures (errCnt).
            }
        }
    } finally {
        // Always release the pool threads, even when interrupted mid-wait.
        executor.shutdown();
    }
}
/**
 * Recursively delete the file or directory at the given path.
 *
 * <p>Directories are emptied depth-first before being removed. Failed
 * deletions are silently ignored, matching {@link File#delete()} semantics.
 * The original issued a redundant second {@code delete()} for plain files;
 * this version deletes each path exactly once.
 *
 * @param filepath the absolute path of the file or directory to delete
 * @throws IOException declared for caller compatibility (not thrown directly here)
 */
private void delTopicFiles(String filepath) throws IOException {
    File targetFile = new File(filepath);
    if (!targetFile.exists()) {
        return;
    }
    if (targetFile.isDirectory()) {
        File[] files = targetFile.listFiles();
        if (files != null) {
            for (File file : files) {
                this.delTopicFiles(file.getAbsolutePath());
            }
        }
    }
    // Deletes the (now empty) directory, or the plain file itself.
    targetFile.delete();
}
/**
 * Refresh message-store's dynamic configures.
 *
 * <p>For every topic whose properties changed between the old and new
 * configure snapshots, push the new unflush thresholds down to each of the
 * topic's message stores. Per-store refresh failures are logged and skipped.
 *
 * @param oldTopicConfigMap the stored topic configure map
 * @param newTopicConfigMap the newly topic configure map
 */
public void refreshMessageStoresHoldVals(Map<String, TopicMetadata> oldTopicConfigMap,
                                         Map<String, TopicMetadata> newTopicConfigMap) {
    // Nothing to refresh unless both snapshots carry data.
    if ((newTopicConfigMap == null) || newTopicConfigMap.isEmpty()
            || (oldTopicConfigMap == null) || oldTopicConfigMap.isEmpty()) {
        return;
    }
    StringBuilder strBuff = new StringBuilder(512);
    for (TopicMetadata curMetadata : newTopicConfigMap.values()) {
        TopicMetadata prevMetadata = oldTopicConfigMap.get(curMetadata.getTopic());
        // Only propagate when the topic existed before and its properties changed.
        if ((prevMetadata == null) || prevMetadata.isPropertyEquals(curMetadata)) {
            continue;
        }
        ConcurrentHashMap<Integer, MessageStore> topicStores =
                MessageStoreManager.this.dataStores.get(curMetadata.getTopic());
        if ((topicStores == null) || topicStores.isEmpty()) {
            continue;
        }
        for (MessageStore msgStore : topicStores.values()) {
            if (msgStore == null) {
                continue;
            }
            try {
                msgStore.refreshUnflushThreshold(curMetadata);
            } catch (Throwable ee) {
                logger.error(strBuff.append("[Store Manager] refresh ")
                        .append(msgStore.getStoreKey())
                        .append("'s parameter error,").toString(), ee);
                strBuff.delete(0, strBuff.length());
            }
        }
    }
}
private class LogClearRunner implements Runnable {
public LogClearRunner() {
//
}
@Override
public void run() {
StringBuilder sBuilder = new StringBuilder(256);
long startTime = System.currentTimeMillis();
Set<String> expiredTopic = getExpiredTopicSet(sBuilder);
if (!expiredTopic.isEmpty()) {
logger.info(sBuilder.append("Found ").append(expiredTopic.size())
.append(" files expired, start delete files!").toString());
sBuilder.delete(0, sBuilder.length());
for (String topicName : expiredTopic) {
if (topicName == null) {
continue;
}
Map<Integer, MessageStore> storeMap = dataStores.get(topicName);
if (storeMap == null || storeMap.isEmpty()) {
continue;
}
for (Map.Entry<Integer, MessageStore> entry : storeMap.entrySet()) {
if (entry.getValue() == null) {
continue;
}
try {
entry.getValue().runClearupPolicy(false);
} catch (final Throwable e) {
logger.error(sBuilder.append("Try to run delete policy with ")
.append(entry.getValue().getStoreKey())
.append("'s log file failed").toString(), e);
sBuilder.delete(0, sBuilder.length());
}
}
}
logger.info("Log Clear Scheduler finished file delete!");
}
long dltTime = System.currentTimeMillis() - startTime;
if (dltTime >= tubeConfig.getLogClearupDurationMs()) {
logger.warn(sBuilder.append("Log Clear up task continue over the clearup duration, ")
.append("used ").append(dltTime).append(", configure value is ")
.append(tubeConfig.getLogClearupDurationMs()).toString());
sBuilder.delete(0, sBuilder.length());
}
}
private Set<String> getExpiredTopicSet(final StringBuilder sb) {
Set<String> expiredTopic = new HashSet<>();
for (Map<Integer, MessageStore> storeMap : dataStores.values()) {
if (storeMap == null || storeMap.isEmpty()) {
continue;
}
for (MessageStore msgStore : storeMap.values()) {
if (msgStore == null) {
continue;
}
try {
if (msgStore.runClearupPolicy(true)) {
expiredTopic.add(msgStore.getTopic());
}
} catch (final Throwable e) {
logger.error(sb.append("Try to run delete policy with ")
.append(msgStore.getStoreKey())
.append("'s log file failed").toString(), e);
sb.delete(0, sb.length());
}
}
}
return expiredTopic;
}
}
private class DiskUnFlushRunner implements Runnable {
public DiskUnFlushRunner() {
//
}
@Override
public void run() {
StringBuilder sBuilder = new StringBuilder(256);
for (Map<Integer, MessageStore> storeMap : dataStores.values()) {
if (storeMap == null || storeMap.isEmpty()) {
continue;
}
for (MessageStore msgStore : storeMap.values()) {
if (msgStore == null) {
continue;
}
try {
msgStore.flushFile();
} catch (final Throwable e) {
logger.error(sBuilder.append("[Store Manager] Try to flush ")
.append(msgStore.getStoreKey())
.append("'s file-store failed : ").toString(), e);
sBuilder.delete(0, sBuilder.length());
}
}
}
}
}
private class MemUnFlushRunner implements Runnable {
public MemUnFlushRunner() {
//
}
@Override
public void run() {
StringBuilder sBuilder = new StringBuilder(256);
for (Map<Integer, MessageStore> storeMap : dataStores.values()) {
if (storeMap == null || storeMap.isEmpty()) {
continue;
}
for (MessageStore msgStore : storeMap.values()) {
if (msgStore == null) {
continue;
}
try {
msgStore.flushMemCacheData();
} catch (final Throwable e) {
logger.error(sBuilder.append("[Store Manager] Try to flush ")
.append(msgStore.getStoreKey())
.append("'s mem-store failed : ").toString(), e);
sBuilder.delete(0, sBuilder.length());
}
}
}
}
}
}
|
googleapis/google-cloud-java | 36,665 | java-shopping-merchant-quota/proto-google-shopping-merchant-quota-v1beta/src/main/java/com/google/shopping/merchant/quota/v1beta/MethodDetails.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/quota/v1beta/quota.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.quota.v1beta;
/**
*
*
* <pre>
* The method details per method in the Merchant API.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.quota.v1beta.MethodDetails}
*/
public final class MethodDetails extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.quota.v1beta.MethodDetails)
MethodDetailsOrBuilder {
private static final long serialVersionUID = 0L;
// Use MethodDetails.newBuilder() to construct.
private MethodDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MethodDetails() {
method_ = "";
version_ = "";
subapi_ = "";
path_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MethodDetails();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.quota.v1beta.QuotaProto
.internal_static_google_shopping_merchant_quota_v1beta_MethodDetails_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.quota.v1beta.QuotaProto
.internal_static_google_shopping_merchant_quota_v1beta_MethodDetails_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.quota.v1beta.MethodDetails.class,
com.google.shopping.merchant.quota.v1beta.MethodDetails.Builder.class);
}
public static final int METHOD_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object method_ = "";
/**
*
*
* <pre>
* Output only. The name of the method for example `products.list`.
* </pre>
*
* <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The method.
*/
@java.lang.Override
public java.lang.String getMethod() {
java.lang.Object ref = method_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
method_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The name of the method for example `products.list`.
* </pre>
*
* <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for method.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMethodBytes() {
java.lang.Object ref = method_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
method_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VERSION_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object version_ = "";
/**
*
*
* <pre>
* Output only. The API version that the method belongs to.
* </pre>
*
* <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The version.
*/
@java.lang.Override
public java.lang.String getVersion() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
version_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The API version that the method belongs to.
* </pre>
*
* <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for version.
*/
@java.lang.Override
public com.google.protobuf.ByteString getVersionBytes() {
java.lang.Object ref = version_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
version_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SUBAPI_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object subapi_ = "";
/**
*
*
* <pre>
* Output only. The sub-API that the method belongs to.
* </pre>
*
* <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The subapi.
*/
@java.lang.Override
public java.lang.String getSubapi() {
java.lang.Object ref = subapi_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
subapi_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The sub-API that the method belongs to.
* </pre>
*
* <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for subapi.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSubapiBytes() {
java.lang.Object ref = subapi_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
subapi_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PATH_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object path_ = "";
/**
*
*
* <pre>
* Output only. The path for the method such as
* `products/v1/productInputs.insert`
* </pre>
*
* <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The path.
*/
@java.lang.Override
public java.lang.String getPath() {
java.lang.Object ref = path_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
path_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The path for the method such as
* `products/v1/productInputs.insert`
* </pre>
*
* <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for path.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPathBytes() {
java.lang.Object ref = path_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
path_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  // memoizedIsInitialized is a tri-state cache: -1 = unknown, 0 = false, 1 = true.
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 semantics: string fields are serialized only when non-empty
  // (i.e. not at their default value).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, method_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, version_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subapi_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, subapi_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, path_);
  }
  // Round-trip any fields parsed from a newer schema version.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the computed size; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // Mirror writeTo(): only non-empty string fields contribute to the size.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(method_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, method_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(version_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, version_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(subapi_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, subapi_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(path_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, path_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.shopping.merchant.quota.v1beta.MethodDetails)) {
    return super.equals(obj);
  }
  com.google.shopping.merchant.quota.v1beta.MethodDetails other =
      (com.google.shopping.merchant.quota.v1beta.MethodDetails) obj;
  // Field-wise comparison over all four string fields, plus unknown fields
  // so messages parsed from different schema versions compare unequal.
  if (!getMethod().equals(other.getMethod())) return false;
  if (!getVersion().equals(other.getVersion())) return false;
  if (!getSubapi().equals(other.getSubapi())) return false;
  if (!getPath().equals(other.getPath())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // memoizedHashCode uses 0 as the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Combines the descriptor, each field number and value, and unknown
  // fields — consistent with equals() above.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + METHOD_FIELD_NUMBER;
  hash = (53 * hash) + getMethod().hashCode();
  hash = (37 * hash) + VERSION_FIELD_NUMBER;
  hash = (53 * hash) + getVersion().hashCode();
  hash = (37 * hash) + SUBAPI_FIELD_NUMBER;
  hash = (53 * hash) + getSubapi().hashCode();
  hash = (37 * hash) + PATH_FIELD_NUMBER;
  hash = (53 * hash) + getPath().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.quota.v1beta.MethodDetails parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.quota.v1beta.MethodDetails prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The method details per method in the Merchant API.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.quota.v1beta.MethodDetails}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.quota.v1beta.MethodDetails)
com.google.shopping.merchant.quota.v1beta.MethodDetailsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.quota.v1beta.QuotaProto
.internal_static_google_shopping_merchant_quota_v1beta_MethodDetails_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.quota.v1beta.QuotaProto
.internal_static_google_shopping_merchant_quota_v1beta_MethodDetails_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.quota.v1beta.MethodDetails.class,
com.google.shopping.merchant.quota.v1beta.MethodDetails.Builder.class);
}
// Construct using com.google.shopping.merchant.quota.v1beta.MethodDetails.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
method_ = "";
version_ = "";
subapi_ = "";
path_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.quota.v1beta.QuotaProto
.internal_static_google_shopping_merchant_quota_v1beta_MethodDetails_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.quota.v1beta.MethodDetails getDefaultInstanceForType() {
return com.google.shopping.merchant.quota.v1beta.MethodDetails.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.merchant.quota.v1beta.MethodDetails build() {
com.google.shopping.merchant.quota.v1beta.MethodDetails result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.shopping.merchant.quota.v1beta.MethodDetails buildPartial() {
com.google.shopping.merchant.quota.v1beta.MethodDetails result =
new com.google.shopping.merchant.quota.v1beta.MethodDetails(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose has-bit is set in the builder's bitField0_,
// leaving untouched fields at the message's defaults.
private void buildPartial0(com.google.shopping.merchant.quota.v1beta.MethodDetails result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.method_ = method_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.version_ = version_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.subapi_ = subapi_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.path_ = path_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.merchant.quota.v1beta.MethodDetails) {
return mergeFrom((com.google.shopping.merchant.quota.v1beta.MethodDetails) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another MethodDetails into this builder using proto3 semantics:
// only non-default (non-empty) fields of `other` overwrite this builder's
// values, and unknown fields are merged as well.
public Builder mergeFrom(com.google.shopping.merchant.quota.v1beta.MethodDetails other) {
  if (other == com.google.shopping.merchant.quota.v1beta.MethodDetails.getDefaultInstance())
    return this;
  if (!other.getMethod().isEmpty()) {
    method_ = other.method_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (!other.getVersion().isEmpty()) {
    version_ = other.version_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (!other.getSubapi().isEmpty()) {
    subapi_ = other.subapi_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (!other.getPath().isEmpty()) {
    path_ = other.path_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
method_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
version_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
subapi_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
path_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object method_ = "";
/**
*
*
* <pre>
* Output only. The name of the method for example `products.list`.
* </pre>
*
* <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The method.
*/
public java.lang.String getMethod() {
java.lang.Object ref = method_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
method_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. The name of the method for example `products.list`.
* </pre>
*
* <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for method.
*/
public com.google.protobuf.ByteString getMethodBytes() {
java.lang.Object ref = method_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
method_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. The name of the method for example `products.list`.
* </pre>
*
* <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The method to set.
* @return This builder for chaining.
*/
public Builder setMethod(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
method_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The name of the method for example `products.list`.
* </pre>
*
* <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearMethod() {
method_ = getDefaultInstance().getMethod();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. The name of the method for example `products.list`.
* </pre>
*
* <code>string method = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for method to set.
* @return This builder for chaining.
*/
public Builder setMethodBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
method_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Backing store for the version field; holds either a String or a ByteString.
private java.lang.Object version_ = "";

/**
 * Output only. The API version that the method belongs to.
 *
 * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The version.
 */
public java.lang.String getVersion() {
  java.lang.Object raw = version_;
  if (raw instanceof java.lang.String) {
    return (java.lang.String) raw;
  }
  // Stored as bytes: decode once and cache the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) raw;
  java.lang.String decoded = bytes.toStringUtf8();
  version_ = decoded;
  return decoded;
}

/**
 * Returns the UTF-8 bytes of the {@code version} field, caching the
 * ByteString form when the field is currently held as a String.
 *
 * <code>string version = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The bytes for version.
 */
public com.google.protobuf.ByteString getVersionBytes() {
  java.lang.Object raw = version_;
  if (!(raw instanceof String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  version_ = encoded;
  return encoded;
}

/**
 * Sets the {@code version} field.
 *
 * @param value The version to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setVersion(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  version_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}

/**
 * Resets the {@code version} field to its default (empty) value.
 *
 * @return This builder for chaining.
 */
public Builder clearVersion() {
  version_ = getDefaultInstance().getVersion();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}

/**
 * Sets the {@code version} field from UTF-8 bytes.
 *
 * @param value The bytes for version to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setVersionBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  version_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Backing store for the subapi field; holds either a String or a ByteString.
private java.lang.Object subapi_ = "";

/**
 * Output only. The sub-API that the method belongs to.
 *
 * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The subapi.
 */
public java.lang.String getSubapi() {
  java.lang.Object raw = subapi_;
  if (raw instanceof java.lang.String) {
    return (java.lang.String) raw;
  }
  // Stored as bytes: decode once and cache the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) raw;
  java.lang.String decoded = bytes.toStringUtf8();
  subapi_ = decoded;
  return decoded;
}

/**
 * Returns the UTF-8 bytes of the {@code subapi} field, caching the
 * ByteString form when the field is currently held as a String.
 *
 * <code>string subapi = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The bytes for subapi.
 */
public com.google.protobuf.ByteString getSubapiBytes() {
  java.lang.Object raw = subapi_;
  if (!(raw instanceof String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  subapi_ = encoded;
  return encoded;
}

/**
 * Sets the {@code subapi} field.
 *
 * @param value The subapi to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setSubapi(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  subapi_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}

/**
 * Resets the {@code subapi} field to its default (empty) value.
 *
 * @return This builder for chaining.
 */
public Builder clearSubapi() {
  subapi_ = getDefaultInstance().getSubapi();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}

/**
 * Sets the {@code subapi} field from UTF-8 bytes.
 *
 * @param value The bytes for subapi to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setSubapiBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  subapi_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Backing store for the path field; holds either a String or a ByteString.
private java.lang.Object path_ = "";

/**
 * Output only. The path for the method such as
 * {@code products/v1/productInputs.insert}.
 *
 * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The path.
 */
public java.lang.String getPath() {
  java.lang.Object raw = path_;
  if (raw instanceof java.lang.String) {
    return (java.lang.String) raw;
  }
  // Stored as bytes: decode once and cache the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) raw;
  java.lang.String decoded = bytes.toStringUtf8();
  path_ = decoded;
  return decoded;
}

/**
 * Returns the UTF-8 bytes of the {@code path} field, caching the
 * ByteString form when the field is currently held as a String.
 *
 * <code>string path = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
 *
 * @return The bytes for path.
 */
public com.google.protobuf.ByteString getPathBytes() {
  java.lang.Object raw = path_;
  if (!(raw instanceof String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  path_ = encoded;
  return encoded;
}

/**
 * Sets the {@code path} field.
 *
 * @param value The path to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setPath(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  path_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}

/**
 * Resets the {@code path} field to its default (empty) value.
 *
 * @return This builder for chaining.
 */
public Builder clearPath() {
  path_ = getDefaultInstance().getPath();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}

/**
 * Sets the {@code path} field from UTF-8 bytes.
 *
 * @param value The bytes for path to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setPathBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  path_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
// Unknown-field handling is delegated entirely to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.quota.v1beta.MethodDetails)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.quota.v1beta.MethodDetails)
// Shared immutable default instance, created eagerly at class-load time.
private static final com.google.shopping.merchant.quota.v1beta.MethodDetails DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.shopping.merchant.quota.v1beta.MethodDetails();
}
// Returns the singleton default (empty) instance of this message type.
public static com.google.shopping.merchant.quota.v1beta.MethodDetails getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser for MethodDetails. On failure the partially-built message is
// attached to the thrown exception so callers can inspect what was parsed.
private static final com.google.protobuf.Parser<MethodDetails> PARSER =
new com.google.protobuf.AbstractParser<MethodDetails>() {
@java.lang.Override
public MethodDetails parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Re-throw with the partial message attached.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures as protocol errors, keeping the partial state.
// NOTE: this catch must come last — InvalidProtocolBufferException is an IOException.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Returns the shared parser for this message type.
public static com.google.protobuf.Parser<MethodDetails> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MethodDetails> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.shopping.merchant.quota.v1beta.MethodDetails getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,757 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListSchedulesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/schedule_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [ScheduleService.ListSchedules][google.cloud.aiplatform.v1.ScheduleService.ListSchedules]
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListSchedulesResponse}
*/
public final class ListSchedulesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListSchedulesResponse)
ListSchedulesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSchedulesResponse.newBuilder() to construct.
private ListSchedulesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Initializes repeated/string fields to their empty defaults.
private ListSchedulesResponse() {
schedules_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
// Called reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListSchedulesResponse();
}
// Descriptor lookup used for reflective access by the protobuf runtime.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.ScheduleServiceProto
.internal_static_google_cloud_aiplatform_v1_ListSchedulesResponse_descriptor;
}
// Binds descriptor fields to this generated class's accessors.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.ScheduleServiceProto
.internal_static_google_cloud_aiplatform_v1_ListSchedulesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListSchedulesResponse.class,
com.google.cloud.aiplatform.v1.ListSchedulesResponse.Builder.class);
}
public static final int SCHEDULES_FIELD_NUMBER = 1;

// Immutable once the message is built; @SuppressWarnings because the list
// type is not declared Serializable.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.Schedule> schedules_;

/**
 * List of Schedules in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.Schedule> getSchedulesList() {
  return schedules_;
}

/**
 * List of Schedules in the requested page, viewed as ScheduleOrBuilder.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.ScheduleOrBuilder>
    getSchedulesOrBuilderList() {
  return schedules_;
}

/**
 * Number of Schedules in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
@java.lang.Override
public int getSchedulesCount() {
  return schedules_.size();
}

/**
 * Returns the Schedule at {@code index}.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.Schedule getSchedules(int index) {
  return schedules_.get(index);
}

/**
 * Returns the Schedule at {@code index} as a ScheduleOrBuilder view.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
@java.lang.Override
public com.google.cloud.aiplatform.v1.ScheduleOrBuilder getSchedulesOrBuilder(int index) {
  return schedules_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Holds either a String or a ByteString; volatile so the lazily-decoded
// String form is published safely across threads.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 * A token to retrieve the next page of results. Pass to
 * {@code ListSchedulesRequest.page_token} to obtain that page.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object raw = nextPageToken_;
  if (raw instanceof java.lang.String) {
    return (java.lang.String) raw;
  }
  // Stored as bytes: decode once and cache the String form.
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) raw;
  java.lang.String decoded = bytes.toStringUtf8();
  nextPageToken_ = decoded;
  return decoded;
}

/**
 * Returns the UTF-8 bytes of {@code next_page_token}, caching the
 * ByteString form when the field is currently held as a String.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object raw = nextPageToken_;
  if (!(raw instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) raw;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) raw);
  nextPageToken_ = encoded;
  return encoded;
}
// Memoized init check: -1 = unknown, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
// Writes schedules (field 1), next_page_token (field 2), then unknown fields,
// matching the order used by getSerializedSize below.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < schedules_.size(); i++) {
output.writeMessage(1, schedules_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte length.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < schedules_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, schedules_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over schedules, next_page_token, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ListSchedulesResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ListSchedulesResponse other =
(com.google.cloud.aiplatform.v1.ListSchedulesResponse) obj;
if (!getSchedulesList().equals(other.getSchedulesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash consistent with equals(); memoized since the message is immutable.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSchedulesCount() > 0) {
hash = (37 * hash) + SCHEDULES_FIELD_NUMBER;
hash = (53 * hash) + getSchedulesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protobuf parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite. All delegate to the shared PARSER.
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message body.
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListSchedulesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListSchedulesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// The default instance yields a fresh Builder; any other instance is merged in.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [ScheduleService.ListSchedules][google.cloud.aiplatform.v1.ScheduleService.ListSchedules]
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListSchedulesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListSchedulesResponse)
com.google.cloud.aiplatform.v1.ListSchedulesResponseOrBuilder {
// Descriptor lookup used for reflective access by the protobuf runtime.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.ScheduleServiceProto
.internal_static_google_cloud_aiplatform_v1_ListSchedulesResponse_descriptor;
}
// Binds descriptor fields to this generated class's accessors.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.ScheduleServiceProto
.internal_static_google_cloud_aiplatform_v1_ListSchedulesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListSchedulesResponse.class,
com.google.cloud.aiplatform.v1.ListSchedulesResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ListSchedulesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields (and the nested repeated-field builder, if any) to defaults.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (schedulesBuilder_ == null) {
schedules_ = java.util.Collections.emptyList();
} else {
schedules_ = null;
schedulesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.ScheduleServiceProto
.internal_static_google_cloud_aiplatform_v1_ListSchedulesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListSchedulesResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ListSchedulesResponse.getDefaultInstance();
}
// Builds and verifies initialization (always true here; no required fields).
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListSchedulesResponse build() {
com.google.cloud.aiplatform.v1.ListSchedulesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds without the initialization check; copies repeated fields first,
// then the bit-guarded scalar fields.
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListSchedulesResponse buildPartial() {
com.google.cloud.aiplatform.v1.ListSchedulesResponse result =
new com.google.cloud.aiplatform.v1.ListSchedulesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Freezes the schedules list (or takes the nested builder's snapshot).
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.ListSchedulesResponse result) {
if (schedulesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
schedules_ = java.util.Collections.unmodifiableList(schedules_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.schedules_ = schedules_;
} else {
result.schedules_ = schedulesBuilder_.build();
}
}
// Copies next_page_token only when its has-bit (0x2) is set.
private void buildPartial0(com.google.cloud.aiplatform.v1.ListSchedulesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// Reflective field-manipulation methods: all delegate to the generated superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatch: use the typed merge when possible, else fall back to reflection.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ListSchedulesResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.ListSchedulesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: concatenates schedules; adopts other's next_page_token if non-empty.
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListSchedulesResponse other) {
if (other == com.google.cloud.aiplatform.v1.ListSchedulesResponse.getDefaultInstance())
return this;
if (schedulesBuilder_ == null) {
if (!other.schedules_.isEmpty()) {
if (schedules_.isEmpty()) {
// Adopt the other list directly (it is immutable in a built message).
schedules_ = other.schedules_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSchedulesIsMutable();
schedules_.addAll(other.schedules_);
}
onChanged();
}
} else {
if (!other.schedules_.isEmpty()) {
if (schedulesBuilder_.isEmpty()) {
// Discard the empty nested builder and adopt the other list wholesale.
schedulesBuilder_.dispose();
schedulesBuilder_ = null;
schedules_ = other.schedules_;
bitField0_ = (bitField0_ & ~0x00000001);
schedulesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSchedulesFieldBuilder()
: null;
} else {
schedulesBuilder_.addAllMessages(other.schedules_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// No required fields, so a Builder is always "initialized".
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: reads tag/value pairs until EOF (tag 0) or an end-group
// tag, dispatching on the field tags for schedules (10) and next_page_token (18).
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
// Field 1 (schedules): parse one Schedule message and append it.
com.google.cloud.aiplatform.v1.Schedule m =
input.readMessage(
com.google.cloud.aiplatform.v1.Schedule.parser(), extensionRegistry);
if (schedulesBuilder_ == null) {
ensureSchedulesIsMutable();
schedules_.add(m);
} else {
schedulesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
// Field 2 (next_page_token): UTF-8 validated string.
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parents even on failure so partial state is observed.
onChanged();
} // finally
return this;
}
// Bit 0x1 tracks schedules_ mutability; bit 0x2 tracks next_page_token presence.
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.Schedule> schedules_ =
java.util.Collections.emptyList();
// Copy-on-first-write: replaces the shared/immutable list with a private
// ArrayList before the first mutation, then marks it mutable via bit 0x1.
private void ensureSchedulesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
schedules_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Schedule>(schedules_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created nested builder; when non-null it owns the list state.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Schedule,
com.google.cloud.aiplatform.v1.Schedule.Builder,
com.google.cloud.aiplatform.v1.ScheduleOrBuilder>
schedulesBuilder_;
/**
 * List of Schedules in the requested page.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public java.util.List<com.google.cloud.aiplatform.v1.Schedule> getSchedulesList() {
  return schedulesBuilder_ == null
      ? java.util.Collections.unmodifiableList(schedules_)
      : schedulesBuilder_.getMessageList();
}

/**
 * Number of schedules currently held by this builder.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public int getSchedulesCount() {
  return schedulesBuilder_ == null ? schedules_.size() : schedulesBuilder_.getCount();
}

/**
 * Returns the schedule at {@code index}.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public com.google.cloud.aiplatform.v1.Schedule getSchedules(int index) {
  return schedulesBuilder_ == null
      ? schedules_.get(index)
      : schedulesBuilder_.getMessage(index);
}
/**
 * Replaces the schedule at {@code index} with {@code value}.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 *
 * @param value the replacement schedule; must not be null.
 */
public Builder setSchedules(int index, com.google.cloud.aiplatform.v1.Schedule value) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.setMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureSchedulesIsMutable();
  schedules_.set(index, value);
  onChanged();
  return this;
}

/**
 * Replaces the schedule at {@code index} with the message built from
 * {@code builderForValue}.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public Builder setSchedules(
    int index, com.google.cloud.aiplatform.v1.Schedule.Builder builderForValue) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.setMessage(index, builderForValue.build());
    return this;
  }
  ensureSchedulesIsMutable();
  schedules_.set(index, builderForValue.build());
  onChanged();
  return this;
}
/**
 * Appends {@code value} to the schedules list.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 *
 * @param value the schedule to append; must not be null.
 */
public Builder addSchedules(com.google.cloud.aiplatform.v1.Schedule value) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.addMessage(value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureSchedulesIsMutable();
  schedules_.add(value);
  onChanged();
  return this;
}

/**
 * Inserts {@code value} at {@code index} in the schedules list.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 *
 * @param value the schedule to insert; must not be null.
 */
public Builder addSchedules(int index, com.google.cloud.aiplatform.v1.Schedule value) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.addMessage(index, value);
    return this;
  }
  if (value == null) {
    throw new NullPointerException();
  }
  ensureSchedulesIsMutable();
  schedules_.add(index, value);
  onChanged();
  return this;
}

/**
 * Appends the message built from {@code builderForValue} to the schedules list.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public Builder addSchedules(com.google.cloud.aiplatform.v1.Schedule.Builder builderForValue) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.addMessage(builderForValue.build());
    return this;
  }
  ensureSchedulesIsMutable();
  schedules_.add(builderForValue.build());
  onChanged();
  return this;
}

/**
 * Inserts the message built from {@code builderForValue} at {@code index}.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public Builder addSchedules(
    int index, com.google.cloud.aiplatform.v1.Schedule.Builder builderForValue) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.addMessage(index, builderForValue.build());
    return this;
  }
  ensureSchedulesIsMutable();
  schedules_.add(index, builderForValue.build());
  onChanged();
  return this;
}
/**
 * Appends every element of {@code values} to the schedules list.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public Builder addAllSchedules(
    java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Schedule> values) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.addAllMessages(values);
    return this;
  }
  ensureSchedulesIsMutable();
  com.google.protobuf.AbstractMessageLite.Builder.addAll(values, schedules_);
  onChanged();
  return this;
}

/**
 * Removes all schedules from this builder.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public Builder clearSchedules() {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.clear();
    return this;
  }
  schedules_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 * Removes the schedule at {@code index}.
 *
 * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
 */
public Builder removeSchedules(int index) {
  if (schedulesBuilder_ != null) {
    schedulesBuilder_.remove(index);
    return this;
  }
  ensureSchedulesIsMutable();
  schedules_.remove(index);
  onChanged();
  return this;
}
    /**
     *
     *
     * <pre>
     * List of Schedules in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Schedule.Builder getSchedulesBuilder(int index) {
      // Forces creation of the field builder (switches the field into builder mode).
      return getSchedulesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * List of Schedules in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.ScheduleOrBuilder getSchedulesOrBuilder(int index) {
      if (schedulesBuilder_ == null) {
        // List mode: the message itself satisfies the OrBuilder interface.
        return schedules_.get(index);
      } else {
        return schedulesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of Schedules in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1.ScheduleOrBuilder>
        getSchedulesOrBuilderList() {
      if (schedulesBuilder_ != null) {
        return schedulesBuilder_.getMessageOrBuilderList();
      } else {
        // Defensive read-only view so callers cannot mutate the backing list.
        return java.util.Collections.unmodifiableList(schedules_);
      }
    }
    /**
     *
     *
     * <pre>
     * List of Schedules in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Schedule.Builder addSchedulesBuilder() {
      // Appends a default-valued element and returns its builder for in-place editing.
      return getSchedulesFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.Schedule.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of Schedules in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Schedule.Builder addSchedulesBuilder(int index) {
      return getSchedulesFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.Schedule.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of Schedules in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Schedule schedules = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Schedule.Builder>
        getSchedulesBuilderList() {
      return getSchedulesFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 and hands ownership of the
    // current list to it; from then on schedules_ is null and all access goes
    // through schedulesBuilder_ ("builder mode").
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Schedule,
            com.google.cloud.aiplatform.v1.Schedule.Builder,
            com.google.cloud.aiplatform.v1.ScheduleOrBuilder>
        getSchedulesFieldBuilder() {
      if (schedulesBuilder_ == null) {
        schedulesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Schedule,
                com.google.cloud.aiplatform.v1.Schedule.Builder,
                com.google.cloud.aiplatform.v1.ScheduleOrBuilder>(
                schedules_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        schedules_ = null;
      }
      return schedulesBuilder_;
    }
    // Stored as either a String or a ByteString; converted lazily and the
    // decoded form is cached back into the field on first access.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListSchedulesRequest.page_token][google.cloud.aiplatform.v1.ListSchedulesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the wire-format bytes once and cache the String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListSchedulesRequest.page_token][google.cloud.aiplatform.v1.ListSchedulesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode the cached String once and cache the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListSchedulesRequest.page_token][google.cloud.aiplatform.v1.ListSchedulesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Mark field 2 as explicitly set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListSchedulesRequest.page_token][google.cloud.aiplatform.v1.ListSchedulesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Restore the proto default ("") and clear the has-bit for field 2.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListSchedulesRequest.page_token][google.cloud.aiplatform.v1.ListSchedulesRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling is inherited unchanged; these overrides exist only
    // to narrow the return type to this Builder for call chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListSchedulesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListSchedulesResponse)
  // Singleton default instance shared by all callers; created eagerly at
  // class-initialization time.
  private static final com.google.cloud.aiplatform.v1.ListSchedulesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListSchedulesResponse();
  }

  public static com.google.cloud.aiplatform.v1.ListSchedulesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure it attaches the partially-built message
  // to the thrown InvalidProtocolBufferException so callers can inspect what
  // was decoded before the error.
  private static final com.google.protobuf.Parser<ListSchedulesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListSchedulesResponse>() {
        @java.lang.Override
        public ListSchedulesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListSchedulesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListSchedulesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ListSchedulesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop-hdfs | 36,932 | src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.IOException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import javax.security.auth.login.LoginException;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.server.common.Util;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
/** Unit tests for permission */
public class TestDFSPermission extends TestCase {
  public static final Log LOG = LogFactory.getLog(TestDFSPermission.class);
  final private static Configuration conf = new HdfsConfiguration();
  // Fake group names used to build the user -> groups mapping below.
  final private static String GROUP1_NAME = "group1";
  final private static String GROUP2_NAME = "group2";
  final private static String GROUP3_NAME = "group3";
  final private static String GROUP4_NAME = "group4";
  // Test users; each belongs to two consecutive groups (see static initializer).
  final private static String USER1_NAME = "user1";
  final private static String USER2_NAME = "user2";
  final private static String USER3_NAME = "user3";
  private static UserGroupInformation SUPERUSER;
  private static UserGroupInformation USER1;
  private static UserGroupInformation USER2;
  private static UserGroupInformation USER3;
  final private static short MAX_PERMISSION = 511; // 0777, the largest rwxrwxrwx value
  final private static short DEFAULT_UMASK = 022;
  final private static short FILE_MASK = 0666; // files never carry execute bits
  final private static FsPermission DEFAULT_PERMISSION =
    FsPermission.createImmutable((short) 0777);
  // Number of random permission values to exercise; test.dfs.permission.num is a
  // percentage of the full 0..0777 range (default 10%).
  final static private int NUM_TEST_PERMISSIONS =
    conf.getInt("test.dfs.permission.num", 10) * (MAX_PERMISSION + 1) / 100;
  final private static String PATH_NAME = "xx";
  final private static Path FILE_DIR_PATH = new Path("/", PATH_NAME);
  final private static Path NON_EXISTENT_PATH = new Path("/parent", PATH_NAME);
  final private static Path NON_EXISTENT_FILE = new Path("/NonExistentFile");
  private FileSystem fs; // assigned per test once the mini cluster is up
  private static Random r; // seeded (and the seed logged) in the static initializer
  // One-time environment setup: seed the RNG (logging the seed so failures are
  // reproducible), force permission checking on, and create the four test users
  // with a fake user->group mapping.
  static {
    try {
      // Initiate the random number generator and logging the seed
      long seed = Util.now();
      r = new Random(seed);
      LOG.info("Random number generator uses seed " + seed);
      LOG.info("NUM_TEST_PERMISSIONS=" + NUM_TEST_PERMISSIONS);

      // explicitly turn on permission checking
      conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, true);

      // create fake mapping for the groups
      Map<String, String[]> u2g_map = new HashMap<String, String[]> (3);
      u2g_map.put(USER1_NAME, new String[] {GROUP1_NAME, GROUP2_NAME });
      u2g_map.put(USER2_NAME, new String[] {GROUP2_NAME, GROUP3_NAME });
      u2g_map.put(USER3_NAME, new String[] {GROUP3_NAME, GROUP4_NAME });
      DFSTestUtil.updateConfWithFakeGroupMapping(conf, u2g_map);

      // Initiate all four users
      SUPERUSER = UserGroupInformation.getCurrentUser();
      USER1 = UserGroupInformation.createUserForTesting(USER1_NAME,
          new String[] { GROUP1_NAME, GROUP2_NAME });
      USER2 = UserGroupInformation.createUserForTesting(USER2_NAME,
          new String[] { GROUP2_NAME, GROUP3_NAME });
      USER3 = UserGroupInformation.createUserForTesting(USER3_NAME,
          new String[] { GROUP3_NAME, GROUP4_NAME });
    } catch (IOException e) {
      // Fail class loading loudly rather than running tests in a broken state.
      throw new RuntimeException(e);
    }
  }
/** This tests if permission setting in create, mkdir, and
* setPermission works correctly
*/
public void testPermissionSetting() throws Exception {
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
try {
cluster.waitActive();
fs = FileSystem.get(conf);
LOG.info("ROOT=" + fs.getFileStatus(new Path("/")));
testPermissionSetting(OpType.CREATE); // test file creation
testPermissionSetting(OpType.MKDIRS); // test directory creation
} finally {
fs.close();
cluster.shutdown();
}
}
  /* check permission setting works correctly for file or directory */
  private void testPermissionSetting(OpType op) throws Exception {
    // case 1: use default permission but all possible umasks
    PermissionGenerator generator = new PermissionGenerator(r);
    for (short i = 0; i < NUM_TEST_PERMISSIONS; i++) {
      createAndCheckPermission(op, FILE_DIR_PATH, generator.next(),
          new FsPermission(DEFAULT_PERMISSION), true);
    }

    // case 2: use permission 0643 and the default umask
    createAndCheckPermission(op, FILE_DIR_PATH, DEFAULT_UMASK,
        new FsPermission((short) 0643), true);

    // case 3: use permission 0643 and umask 0222
    // (delete=false so case 4 can reuse the same path)
    createAndCheckPermission(op, FILE_DIR_PATH, (short) 0222,
        new FsPermission((short) 0643), false);

    // case 4: set permission explicitly; for a file the execute bits of 0111
    // are masked away by FILE_MASK, so the expected result is 0.
    fs.setPermission(FILE_DIR_PATH, new FsPermission((short) 0111));
    short expectedPermission = (short) ((op == OpType.CREATE) ? 0 : 0111);
    checkPermission(FILE_DIR_PATH, expectedPermission, true);

    // case 5: test non-existent parent directory — creation should still
    // succeed because HDFS creates missing parents.
    assertFalse(fs.exists(NON_EXISTENT_PATH));
    createAndCheckPermission(op, NON_EXISTENT_PATH, DEFAULT_UMASK,
        new FsPermission(DEFAULT_PERMISSION), false);
    Path parent = NON_EXISTENT_PATH.getParent();
    // the implicitly created parent inherits its own parent's permission
    checkPermission(parent, getPermission(parent.getParent()), true);
  }
/* get the permission of a file/directory */
private short getPermission(Path path) throws IOException {
return fs.getFileStatus(path).getPermission().toShort();
}
/* create a file/directory with the default umask and permission */
private void create(OpType op, Path name) throws IOException {
create(op, name, DEFAULT_UMASK, new FsPermission(DEFAULT_PERMISSION));
}
  /* create a file/directory with the given umask and permission */
  private void create(OpType op, Path name, short umask,
      FsPermission permission) throws IOException {
    // set umask in configuration, converting to padded octal
    // NOTE: this mutates the shared static conf for every subsequent operation.
    conf.set(FsPermission.UMASK_LABEL, String.format("%1$03o", umask));

    // create the file/directory
    switch (op) {
    case CREATE:
      // overwrite=true, default replication/block size, no progress callback
      FSDataOutputStream out = fs.create(name, permission, true,
          conf.getInt("io.file.buffer.size", 4096),
          fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
      out.close();
      break;
    case MKDIRS:
      fs.mkdirs(name, permission);
      break;
    default:
      // only CREATE and MKDIRS make sense for this helper
      throw new IOException("Unsupported operation: " + op);
    }
  }
  /* create file/directory with the provided umask and permission; then it
   * checks if the permission is set correctly;
   * If the delete flag is true, delete the file afterwards; otherwise leave
   * it in the file system.
   */
  private void createAndCheckPermission(OpType op, Path name, short umask,
      FsPermission permission, boolean delete) throws Exception {
    // create the file/directory
    create(op, name, umask, permission);

    // get the short form of the permission; the immutable DEFAULT_PERMISSION
    // sentinel stands for "all bits", i.e. MAX_PERMISSION (0777)
    short permissionNum = (DEFAULT_PERMISSION.equals(permission)) ? MAX_PERMISSION
        : permission.toShort();

    // get the expected permission: requested bits minus umask bits; files
    // additionally lose execute bits via FILE_MASK
    short expectedPermission = (op == OpType.CREATE) ? (short) (~umask
        & permissionNum & FILE_MASK) : (short) (~umask & permissionNum);

    // check if permission is correctly set
    checkPermission(name, expectedPermission, delete);
  }
/* Check if the permission of a file/directory is the same as the
* expected permission; If the delete flag is true, delete the
* file/directory afterwards.
*/
private void checkPermission(Path name, short expectedPermission,
boolean delete) throws IOException {
try {
// check its permission
assertEquals(getPermission(name), expectedPermission);
} finally {
// delete the file
if (delete) {
fs.delete(name, true);
}
}
}
/* check if the ownership of a file/directory is set correctly */
public void testOwnership() throws Exception {
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
try {
cluster.waitActive();
testOwnership(OpType.CREATE); // test file creation
testOwnership(OpType.MKDIRS); // test directory creation
} finally {
fs.close();
cluster.shutdown();
}
}
  /* change a file/directory's owner and group.
   * if expectDeny is set, expect an AccessControlException.
   */
  private void setOwner(Path path, String owner, String group,
      boolean expectDeny) throws IOException {
    try {
      // a null owner/group argument means "leave unchanged", so the expected
      // value is whatever the file currently has
      String expectedOwner = (owner == null) ? getOwner(path) : owner;
      String expectedGroup = (group == null) ? getGroup(path) : group;
      fs.setOwner(path, owner, group);
      checkOwnership(path, expectedOwner, expectedGroup);
      // reaching here means the operation was allowed; fail if denial expected
      assertFalse(expectDeny);
    } catch(AccessControlException e) {
      // denial happened; fail unless it was expected
      assertTrue(expectDeny);
    }
  }
  /* check ownership is set correctly for a file or directory */
  private void testOwnership(OpType op) throws Exception {
    // case 1: superuser create a file/directory; it inherits the parent's group
    fs = FileSystem.get(conf);
    create(op, FILE_DIR_PATH, DEFAULT_UMASK,
        new FsPermission(DEFAULT_PERMISSION));
    checkOwnership(FILE_DIR_PATH, SUPERUSER.getShortUserName(),
        getGroup(FILE_DIR_PATH.getParent()));

    // case 2: superuser changes FILE_DIR_PATH's owner to be <user1, group3>
    setOwner(FILE_DIR_PATH, USER1.getShortUserName(), GROUP3_NAME, false);

    // case 3: user1 changes FILE_DIR_PATH's owner to be user2
    // (only the superuser may change the owner -> expect deny)
    login(USER1);
    setOwner(FILE_DIR_PATH, USER2.getShortUserName(), null, true);

    // case 4: user1 changes FILE_DIR_PATH's group to be group1 which it belongs
    // to
    setOwner(FILE_DIR_PATH, null, GROUP1_NAME, false);

    // case 5: user1 changes FILE_DIR_PATH's group to be group3
    // which it does not belong to
    setOwner(FILE_DIR_PATH, null, GROUP3_NAME, true);

    // case 6: user2 (non-owner) changes FILE_DIR_PATH's group to be group3
    login(USER2);
    setOwner(FILE_DIR_PATH, null, GROUP3_NAME, true);

    // case 7: user2 (non-owner) changes FILE_DIR_PATH's user to be user2
    setOwner(FILE_DIR_PATH, USER2.getShortUserName(), null, true);

    // delete the file/directory
    login(SUPERUSER);
    fs.delete(FILE_DIR_PATH, true);
  }
/* Return the group owner of the file/directory */
private String getGroup(Path path) throws IOException {
return fs.getFileStatus(path).getGroup();
}
/* Return the file owner of the file/directory */
private String getOwner(Path path) throws IOException {
return fs.getFileStatus(path).getOwner();
}
/* check if ownership is set correctly */
private void checkOwnership(Path name, String expectedOwner,
String expectedGroup) throws IOException {
// check its owner and group
FileStatus status = fs.getFileStatus(name);
assertEquals(status.getOwner(), expectedOwner);
assertEquals(status.getGroup(), expectedGroup);
}
  // Path-name components used to build the ancestor/parent/file/dir hierarchy.
  final static private String ANCESTOR_NAME = "/ancestor";
  final static private String PARENT_NAME = "parent";
  final static private String FILE_NAME = "file";
  final static private String DIR_NAME = "dir";
  final static private String FILE_DIR_NAME = "filedir";
  // The namenode operations whose permission checking is exercised here.
  private enum OpType {CREATE, MKDIRS, OPEN, SET_REPLICATION,
    GET_FILEINFO, IS_DIR, EXISTS, GET_CONTENT_LENGTH, LIST, RENAME, DELETE
  };
/* Check if namenode performs permission checking correctly for
* superuser, file owner, group owner, and other users */
public void testPermissionChecking() throws Exception {
MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
try {
cluster.waitActive();
fs = FileSystem.get(conf);
// set the permission of the root to be world-wide rwx
fs.setPermission(new Path("/"), new FsPermission((short)0777));
// create a directory hierarchy and sets random permission for each inode
PermissionGenerator ancestorPermissionGenerator =
new PermissionGenerator(r);
PermissionGenerator dirPermissionGenerator = new PermissionGenerator(r);
PermissionGenerator filePermissionGenerator = new PermissionGenerator(r);
short[] ancestorPermissions = new short[NUM_TEST_PERMISSIONS];
short[] parentPermissions = new short[NUM_TEST_PERMISSIONS];
short[] permissions = new short[NUM_TEST_PERMISSIONS];
Path[] ancestorPaths = new Path[NUM_TEST_PERMISSIONS];
Path[] parentPaths = new Path[NUM_TEST_PERMISSIONS];
Path[] filePaths = new Path[NUM_TEST_PERMISSIONS];
Path[] dirPaths = new Path[NUM_TEST_PERMISSIONS];
for (int i = 0; i < NUM_TEST_PERMISSIONS; i++) {
// create ancestor directory
ancestorPaths[i] = new Path(ANCESTOR_NAME + i);
create(OpType.MKDIRS, ancestorPaths[i]);
fs.setOwner(ancestorPaths[i], USER1_NAME, GROUP2_NAME);
// create parent directory
parentPaths[i] = new Path(ancestorPaths[i], PARENT_NAME + i);
create(OpType.MKDIRS, parentPaths[i]);
// change parent directory's ownership to be user1
fs.setOwner(parentPaths[i], USER1_NAME, GROUP2_NAME);
filePaths[i] = new Path(parentPaths[i], FILE_NAME + i);
dirPaths[i] = new Path(parentPaths[i], DIR_NAME + i);
// makes sure that each inode at the same level
// has a different permission
ancestorPermissions[i] = ancestorPermissionGenerator.next();
parentPermissions[i] = dirPermissionGenerator.next();
permissions[i] = filePermissionGenerator.next();
fs.setPermission(ancestorPaths[i], new FsPermission(
ancestorPermissions[i]));
fs.setPermission(parentPaths[i], new FsPermission(
parentPermissions[i]));
}
/* file owner */
testPermissionCheckingPerUser(USER1, ancestorPermissions,
parentPermissions, permissions, parentPaths, filePaths, dirPaths);
/* group owner */
testPermissionCheckingPerUser(USER2, ancestorPermissions,
parentPermissions, permissions, parentPaths, filePaths, dirPaths);
/* other owner */
testPermissionCheckingPerUser(USER3, ancestorPermissions,
parentPermissions, permissions, parentPaths, filePaths, dirPaths);
/* super owner */
testPermissionCheckingPerUser(SUPERUSER, ancestorPermissions,
parentPermissions, permissions, parentPaths, filePaths, dirPaths);
} finally {
fs.close();
cluster.shutdown();
}
}
  /* Check if namenode performs permission checking correctly
   * for the given user for operations mkdir, open, setReplication,
   * getFileInfo, isDirectory, exists, getContentLength, list, rename,
   * and delete */
  private void testPermissionCheckingPerUser(UserGroupInformation ugi,
      short[] ancestorPermission, short[] parentPermission,
      short[] filePermission, Path[] parentDirs, Path[] files, Path[] dirs)
      throws Exception {
    // setup: as the superuser, (re)create each file/dir fixture, hand
    // ownership to user1/group2, and apply the i-th random permission
    login(SUPERUSER);
    for (int i = 0; i < NUM_TEST_PERMISSIONS; i++) {
      create(OpType.CREATE, files[i]);
      create(OpType.MKDIRS, dirs[i]);
      fs.setOwner(files[i], USER1_NAME, GROUP2_NAME);
      fs.setOwner(dirs[i], USER1_NAME, GROUP2_NAME);
      checkOwnership(dirs[i], USER1_NAME, GROUP2_NAME);
      checkOwnership(files[i], USER1_NAME, GROUP2_NAME);

      FsPermission fsPermission = new FsPermission(filePermission[i]);
      fs.setPermission(files[i], fsPermission);
      fs.setPermission(dirs[i], fsPermission);
    }

    // run every operation as the user under test against each fixture
    login(ugi);
    for (int i = 0; i < NUM_TEST_PERMISSIONS; i++) {
      testCreateMkdirs(ugi, new Path(parentDirs[i], FILE_DIR_NAME),
          ancestorPermission[i], parentPermission[i]);
      testOpen(ugi, files[i], ancestorPermission[i], parentPermission[i],
          filePermission[i]);
      testSetReplication(ugi, files[i], ancestorPermission[i],
          parentPermission[i], filePermission[i]);
      testSetTimes(ugi, files[i], ancestorPermission[i],
          parentPermission[i], filePermission[i]);
      testStats(ugi, files[i], ancestorPermission[i], parentPermission[i]);
      testList(ugi, files[i], dirs[i], ancestorPermission[i],
          parentPermission[i], filePermission[i]);
      // rename targets the next fixture, wrapping around at the end
      int next = i == NUM_TEST_PERMISSIONS - 1 ? 0 : i + 1;
      testRename(ugi, files[i], files[next], ancestorPermission[i],
          parentPermission[i], ancestorPermission[next], parentPermission[next]);
      testDeleteFile(ugi, files[i], ancestorPermission[i], parentPermission[i]);
      testDeleteDir(ugi, dirs[i], ancestorPermission[i], parentPermission[i],
          filePermission[i], null);
    }

    // test non existent file
    checkNonExistentFile();
  }
/* A random permission generator that guarantees that each permission
* value is generated only once.
*/
static private class PermissionGenerator {
private Random r;
private short permissions[] = new short[MAX_PERMISSION + 1];
private int numLeft = MAX_PERMISSION + 1;
PermissionGenerator(Random r) {
this.r = r;
for (int i = 0; i <= MAX_PERMISSION; i++) {
permissions[i] = (short) i;
}
}
short next() throws IOException {
if (numLeft == 0) {
throw new IOException("No more permission is avaialbe");
}
int index = r.nextInt(numLeft); // choose which permission to return
numLeft--; // decrement the counter
// swap the chosen permission with last available permission in the array
short temp = permissions[numLeft];
permissions[numLeft] = permissions[index];
permissions[index] = temp;
return permissions[numLeft];
}
}
  /* A base class that verifies the permission checking is correct
   * for an operation. Subclasses define which rwx bits the operation needs
   * (setOpPermission) and how to invoke it (call); verifyPermission then
   * checks that the namenode allows/denies exactly when the granted bits
   * cover/miss the required bits. */
  abstract class PermissionVerifier {
    protected Path path;
    protected short ancestorPermission;
    protected short parentPermission;
    private short permission;
    // bits the current user must hold at each level for the op to succeed
    protected short requiredAncestorPermission;
    protected short requiredParentPermission;
    protected short requiredPermission;
    // traversing any ancestor always needs search (x) permission
    final static protected short opAncestorPermission = SEARCH_MASK;
    protected short opParentPermission;
    protected short opPermission;
    protected UserGroupInformation ugi; // cached user; see verifyPermission

    /* initialize */
    protected void set(Path path, short ancestorPermission,
        short parentPermission, short permission) {
      this.path = path;
      this.ancestorPermission = ancestorPermission;
      this.parentPermission = parentPermission;
      this.permission = permission;
      setOpPermission();
      // reset the cache so the next verifyPermission recomputes requirements
      this.ugi = null;
    }

    /* Perform an operation and verify if the permission checking is correct */
    void verifyPermission(UserGroupInformation ugi) throws LoginException,
        IOException {
      // recompute the required bits only when the user changed (identity
      // comparison is intentional: the same UGI objects are reused)
      if (this.ugi != ugi) {
        setRequiredPermissions(ugi);
        this.ugi = ugi;
      }

      try {
        try {
          call();
          assertFalse(expectPermissionDeny());
        } catch(AccessControlException e) {
          assertTrue(expectPermissionDeny());
        }
      } catch (AssertionFailedError ae) {
        // dump the permission state before propagating, to aid debugging
        logPermissions();
        throw ae;
      }
    }

    /** Log the permissions and required permissions */
    protected void logPermissions() {
      LOG.info("required ancestor permission:"
          + Integer.toOctalString(requiredAncestorPermission));
      LOG.info("ancestor permission: "
          + Integer.toOctalString(ancestorPermission));
      LOG.info("required parent permission:"
          + Integer.toOctalString(requiredParentPermission));
      LOG.info("parent permission: " + Integer.toOctalString(parentPermission));
      LOG.info("required permission:"
          + Integer.toOctalString(requiredPermission));
      LOG.info("permission: " + Integer.toOctalString(permission));
    }

    /* Return true if an AccessControlException is expected: denial is
     * expected whenever any level's granted bits fail to cover the
     * required bits */
    protected boolean expectPermissionDeny() {
      return (requiredPermission & permission) != requiredPermission
          || (requiredParentPermission & parentPermission) !=
              requiredParentPermission
          || (requiredAncestorPermission & ancestorPermission) !=
              requiredAncestorPermission;
    }

    /* Set the permissions required to pass the permission checking:
     * superuser needs nothing (SUPER_MASK == 0); user1 is the owner,
     * user2 matches via group, user3 falls into "other" */
    protected void setRequiredPermissions(UserGroupInformation ugi)
        throws IOException {
      if (SUPERUSER.equals(ugi)) {
        requiredAncestorPermission = SUPER_MASK;
        requiredParentPermission = SUPER_MASK;
        requiredPermission = SUPER_MASK;
      } else if (USER1.equals(ugi)) {
        requiredAncestorPermission = (short)(opAncestorPermission & OWNER_MASK);
        requiredParentPermission = (short)(opParentPermission & OWNER_MASK);
        requiredPermission = (short)(opPermission & OWNER_MASK);
      } else if (USER2.equals(ugi)) {
        requiredAncestorPermission = (short)(opAncestorPermission & GROUP_MASK);
        requiredParentPermission = (short)(opParentPermission & GROUP_MASK);
        requiredPermission = (short)(opPermission & GROUP_MASK);
      } else if (USER3.equals(ugi)) {
        requiredAncestorPermission = (short)(opAncestorPermission & OTHER_MASK);
        requiredParentPermission = (short)(opParentPermission & OTHER_MASK);
        requiredPermission = (short)(opPermission & OTHER_MASK);
      } else {
        throw new IllegalArgumentException("Non-supported user: " + ugi);
      }
    }

    /* Set the rwx permissions required for the operation */
    abstract void setOpPermission();

    /* Perform the operation */
    abstract void call() throws IOException;
  }
  // Octal bit masks. SUPER_MASK is 0 because the superuser bypasses
  // permission checking entirely; NULL_MASK means "no bits required".
  final static private short SUPER_MASK = 0;
  final static private short READ_MASK = 0444;
  final static private short WRITE_MASK = 0222;
  final static private short SEARCH_MASK = 0111;
  final static private short NULL_MASK = 0;
  // Masks selecting the owner/group/other slice of a permission value.
  final static private short OWNER_MASK = 0700;
  final static private short GROUP_MASK = 0070;
  final static private short OTHER_MASK = 0007;
  /* A class that verifies the permission checking is correct for create/mkdir*/
  private class CreatePermissionVerifier extends PermissionVerifier {
    private OpType opType;
    // when true, the created file/dir is removed after each call so repeated
    // verifications start from a clean state
    private boolean cleanup = true;

    /* initialize */
    protected void set(Path path, OpType opType, short ancestorPermission,
        short parentPermission) {
      // creation has no target-inode permission requirement -> NULL_MASK
      super.set(path, ancestorPermission, parentPermission, NULL_MASK);
      setOpType(opType);
    }

    void setCleanup(boolean cleanup) {
      this.cleanup = cleanup;
    }

    /* set if the operation mkdir/create */
    void setOpType(OpType opType) {
      this.opType = opType;
    }

    @Override
    void setOpPermission() {
      // creating an entry requires search + write on the parent directory
      this.opParentPermission = SEARCH_MASK | WRITE_MASK;
    }

    @Override
    void call() throws IOException {
      create(opType, path);
      if (cleanup) {
        fs.delete(path, true);
      }
    }
  }
  // shared verifier instance, re-initialized via set() before each use
  private CreatePermissionVerifier createVerifier =
    new CreatePermissionVerifier();

  /* test if the permission checking of create/mkdir is correct */
  private void testCreateMkdirs(UserGroupInformation ugi, Path path,
      short ancestorPermission, short parentPermission) throws Exception {
    createVerifier.set(path, OpType.MKDIRS, ancestorPermission,
        parentPermission);
    createVerifier.verifyPermission(ugi);
    createVerifier.setOpType(OpType.CREATE);
    // leave the file in place so the next call exercises overwrite
    createVerifier.setCleanup(false);
    createVerifier.verifyPermission(ugi);
    createVerifier.setCleanup(true);
    createVerifier.verifyPermission(ugi); // test overWritten
  }
/* A class that verifies the permission checking is correct for open */
private class OpenPermissionVerifier extends PermissionVerifier {
@Override
void setOpPermission() {
this.opParentPermission = SEARCH_MASK;
this.opPermission = READ_MASK;
}
@Override
void call() throws IOException {
FSDataInputStream in = fs.open(path);
in.close();
}
}
  // shared verifier instance for open()
  private OpenPermissionVerifier openVerifier = new OpenPermissionVerifier();

  /* test if the permission checking of open is correct */
  private void testOpen(UserGroupInformation ugi, Path path,
      short ancestorPermission, short parentPermission, short filePermission)
      throws Exception {
    openVerifier
        .set(path, ancestorPermission, parentPermission, filePermission);
    openVerifier.verifyPermission(ugi);
  }
  /* A class that verifies the permission checking is correct for
   * setReplication */
  private class SetReplicationPermissionVerifier extends PermissionVerifier {
    @Override
    void setOpPermission() {
      // changing replication needs search on the parent and write on the file
      this.opParentPermission = SEARCH_MASK;
      this.opPermission = WRITE_MASK;
    }

    @Override
    void call() throws IOException {
      fs.setReplication(path, (short) 1);
    }
  }
  // shared verifier instance for setReplication()
  private SetReplicationPermissionVerifier replicatorVerifier =
    new SetReplicationPermissionVerifier();

  /* test if the permission checking of setReplication is correct */
  private void testSetReplication(UserGroupInformation ugi, Path path,
      short ancestorPermission, short parentPermission, short filePermission)
      throws Exception {
    replicatorVerifier.set(path, ancestorPermission, parentPermission,
        filePermission);
    replicatorVerifier.verifyPermission(ugi);
  }
  /* A class that verifies the permission checking is correct for
   * setTimes */
  private class SetTimesPermissionVerifier extends PermissionVerifier {
    @Override
    void setOpPermission() {
      // changing times needs search on the parent and write on the file
      this.opParentPermission = SEARCH_MASK;
      this.opPermission = WRITE_MASK;
    }

    @Override
    void call() throws IOException {
      // exercise all three argument shapes: both times, mtime only, atime only
      // (-1 means "leave unchanged")
      fs.setTimes(path, 100, 100);
      fs.setTimes(path, -1, 100);
      fs.setTimes(path, 100, -1);
    }
  }
  // Shared verifier instance reused across all setTimes checks.
  private SetTimesPermissionVerifier timesVerifier =
      new SetTimesPermissionVerifier();
  /* test if the permission checking of setTimes is correct */
  private void testSetTimes(UserGroupInformation ugi, Path path,
      short ancestorPermission, short parentPermission, short filePermission)
      throws Exception {
    timesVerifier.set(path, ancestorPermission, parentPermission,
        filePermission);
    timesVerifier.verifyPermission(ugi);
  }
  /* A class that verifies the permission checking is correct for isDirectory,
   * exist, getFileInfo, getContentSummary */
  private class StatsPermissionVerifier extends PermissionVerifier {
    OpType opType;
    /* initialize; stat-like operations check nothing on the inode itself */
    void set(Path path, OpType opType, short ancestorPermission,
        short parentPermission) {
      super.set(path, ancestorPermission, parentPermission, NULL_MASK);
      setOpType(opType);
    }
    /* set if operation is getFileInfo, isDirectory, exist, getContentSummary */
    void setOpType(OpType opType) {
      this.opType = opType;
    }
    @Override
    void setOpPermission() {
      // only traverse permission on the parent directory is required
      this.opParentPermission = SEARCH_MASK;
    }
    @Override
    void call() throws IOException {
      // dispatch to the stat-style operation selected via setOpType
      switch (opType) {
      case GET_FILEINFO:
        fs.getFileStatus(path);
        break;
      case IS_DIR:
        fs.isDirectory(path);
        break;
      case EXISTS:
        fs.exists(path);
        break;
      case GET_CONTENT_LENGTH:
        fs.getContentSummary(path).getLength();
        break;
      default:
        throw new IllegalArgumentException("Unexpected operation type: "
            + opType);
      }
    }
  }
  // Shared verifier instance reused across all stat-style checks.
  private StatsPermissionVerifier statsVerifier = new StatsPermissionVerifier();
  /* test if the permission checking of isDirectory, exist,
   * getFileInfo, getContentSummary is correct */
  private void testStats(UserGroupInformation ugi, Path path,
      short ancestorPermission, short parentPermission) throws Exception {
    // reuse one verifier, cycling through each stat-style operation
    statsVerifier.set(path, OpType.GET_FILEINFO, ancestorPermission,
        parentPermission);
    statsVerifier.verifyPermission(ugi);
    statsVerifier.setOpType(OpType.IS_DIR);
    statsVerifier.verifyPermission(ugi);
    statsVerifier.setOpType(OpType.EXISTS);
    statsVerifier.verifyPermission(ugi);
    statsVerifier.setOpType(OpType.GET_CONTENT_LENGTH);
    statsVerifier.verifyPermission(ugi);
  }
private enum InodeType {
FILE, DIR
};
  /* A class that verifies the permission checking is correct for list */
  private class ListPermissionVerifier extends PermissionVerifier {
    private InodeType inodeType;
    /* initialize */
    void set(Path path, InodeType inodeType, short ancestorPermission,
        short parentPermission, short permission) {
      // inodeType must be assigned before super.set, which may trigger
      // setOpPermission() below
      this.inodeType = inodeType;
      super.set(path, ancestorPermission, parentPermission, permission);
    }
    /* set if the given path is a file/directory */
    void setInodeType(Path path, InodeType inodeType) {
      this.path = path;
      this.inodeType = inodeType;
      setOpPermission();
      // NOTE(review): clearing ugi presumably invalidates cached login
      // state in the base class — confirm against PermissionVerifier
      this.ugi = null;
    }
    @Override
    void setOpPermission() {
      this.opParentPermission = SEARCH_MASK;
      switch (inodeType) {
      case FILE:
        // listing a file requires no permission on the file itself
        this.opPermission = 0;
        break;
      case DIR:
        // listing a directory requires read + traverse on the directory
        this.opPermission = READ_MASK | SEARCH_MASK;
        break;
      default:
        throw new IllegalArgumentException("Illegal inode type: " + inodeType);
      }
    }
    @Override
    void call() throws IOException {
      fs.listStatus(path);
    }
  }
  // Shared verifier instance reused across all list checks.
  ListPermissionVerifier listVerifier = new ListPermissionVerifier();
  /* test if the permission checking of list is correct */
  private void testList(UserGroupInformation ugi, Path file, Path dir,
      short ancestorPermission, short parentPermission, short filePermission)
      throws Exception {
    // verify listing a file first, then switch the same verifier to a dir
    listVerifier.set(file, InodeType.FILE, ancestorPermission,
        parentPermission, filePermission);
    listVerifier.verifyPermission(ugi);
    listVerifier.setInodeType(dir, InodeType.DIR);
    listVerifier.verifyPermission(ugi);
  }
/* A class that verifies the permission checking is correct for rename */
private class RenamePermissionVerifier extends PermissionVerifier {
private Path dst;
private short dstAncestorPermission;
private short dstParentPermission;
/* initialize */
void set(Path src, short srcAncestorPermission, short srcParentPermission,
Path dst, short dstAncestorPermission, short dstParentPermission) {
super.set(src, srcAncestorPermission, srcParentPermission, NULL_MASK);
this.dst = dst;
this.dstAncestorPermission = dstAncestorPermission;
this.dstParentPermission = dstParentPermission;
}
@Override
void setOpPermission() {
opParentPermission = SEARCH_MASK | WRITE_MASK;
}
@Override
void call() throws IOException {
fs.rename(path, dst);
}
@Override
protected boolean expectPermissionDeny() {
return super.expectPermissionDeny()
|| (requiredParentPermission & dstParentPermission) !=
requiredParentPermission
|| (requiredAncestorPermission & dstAncestorPermission) !=
requiredAncestorPermission;
}
protected void logPermissions() {
super.logPermissions();
LOG.info("dst ancestor permission: "
+ Integer.toOctalString(dstAncestorPermission));
LOG.info("dst parent permission: "
+ Integer.toOctalString(dstParentPermission));
}
}
  // Shared verifier instance reused across all rename checks.
  RenamePermissionVerifier renameVerifier = new RenamePermissionVerifier();
  /* test if the permission checking of rename is correct */
  private void testRename(UserGroupInformation ugi, Path src, Path dst,
      short srcAncestorPermission, short srcParentPermission,
      short dstAncestorPermission, short dstParentPermission) throws Exception {
    renameVerifier.set(src, srcAncestorPermission, srcParentPermission, dst,
        dstAncestorPermission, dstParentPermission);
    renameVerifier.verifyPermission(ugi);
  }
  /* A class that verifies the permission checking is correct for delete */
  private class DeletePermissionVerifier extends PermissionVerifier {
    void set(Path path, short ancestorPermission, short parentPermission) {
      // delete checks nothing on the inode itself, only on its parent
      super.set(path, ancestorPermission, parentPermission, NULL_MASK);
    }
    @Override
    void setOpPermission() {
      // deletion requires traverse + write on the parent directory
      this.opParentPermission = SEARCH_MASK | WRITE_MASK;
    }
    @Override
    void call() throws IOException {
      fs.delete(path, true); // recursive delete
    }
  }
/* A class that verifies the permission checking is correct for
* directory deletion */
private class DeleteDirPermissionVerifier extends DeletePermissionVerifier {
private short[] childPermissions;
/* initialize */
void set(Path path, short ancestorPermission, short parentPermission,
short permission, short[] childPermissions) {
set(path, ancestorPermission, parentPermission, permission);
this.childPermissions = childPermissions;
}
@Override
void setOpPermission() {
this.opParentPermission = SEARCH_MASK | WRITE_MASK;
this.opPermission = SEARCH_MASK | WRITE_MASK | READ_MASK;
}
@Override
protected boolean expectPermissionDeny() {
if (super.expectPermissionDeny()) {
return true;
} else {
if (childPermissions != null) {
for (short childPermission : childPermissions) {
if ((requiredPermission & childPermission) != requiredPermission) {
return true;
}
}
}
return false;
}
}
}
  // Shared verifier instance reused across all file-deletion checks.
  DeletePermissionVerifier fileDeletionVerifier =
      new DeletePermissionVerifier();
  /* test if the permission checking of file deletion is correct */
  private void testDeleteFile(UserGroupInformation ugi, Path file,
      short ancestorPermission, short parentPermission) throws Exception {
    fileDeletionVerifier.set(file, ancestorPermission, parentPermission);
    fileDeletionVerifier.verifyPermission(ugi);
  }
  // Shared verifier instance reused across all directory-deletion checks.
  DeleteDirPermissionVerifier dirDeletionVerifier =
      new DeleteDirPermissionVerifier();
  /* test if the permission checking of directory deletion is correct */
  private void testDeleteDir(UserGroupInformation ugi, Path path,
      short ancestorPermission, short parentPermission, short permission,
      short[] childPermissions) throws Exception {
    dirDeletionVerifier.set(path, ancestorPermission, parentPermission,
        permission, childPermissions);
    dirDeletionVerifier.verifyPermission(ugi);
  }
  /* log into dfs as the given user */
  private void login(UserGroupInformation ugi) throws IOException,
      InterruptedException {
    // close any filesystem handle from a previous login before replacing it
    if (fs != null) {
      fs.close();
    }
    fs = DFSTestUtil.getFileSystemAs(ugi, conf);
  }
  /* test non-existent file */
  private void checkNonExistentFile() {
    // Every operation below targets a path that does not exist.  Each call
    // is allowed to fail with an IOException (e.g. file-not-found), but it
    // must never fail with an AccessControlException: permission checking
    // should not deny access while discovering that the file is absent.
    try {
      assertFalse(fs.exists(NON_EXISTENT_FILE));
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
    try {
      fs.open(NON_EXISTENT_FILE);
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
    try {
      fs.setReplication(NON_EXISTENT_FILE, (short)4);
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
    try {
      fs.getFileStatus(NON_EXISTENT_FILE);
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
    try {
      fs.getContentSummary(NON_EXISTENT_FILE).getLength();
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
    try {
      fs.listStatus(NON_EXISTENT_FILE);
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
    try {
      fs.delete(NON_EXISTENT_FILE, true);
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
    try {
      fs.rename(NON_EXISTENT_FILE, new Path(NON_EXISTENT_FILE+".txt"));
    } catch (IOException e) {
      checkNoPermissionDeny(e);
    }
  }
private void checkNoPermissionDeny(IOException e) {
assertFalse(e instanceof AccessControlException);
}
}
|
googleapis/google-cloud-java | 36,759 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListEndpointsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/endpoint_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Response message for
* [EndpointService.ListEndpoints][google.cloud.aiplatform.v1.EndpointService.ListEndpoints].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListEndpointsResponse}
*/
public final class ListEndpointsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListEndpointsResponse)
ListEndpointsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEndpointsResponse.newBuilder() to construct.
private ListEndpointsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEndpointsResponse() {
endpoints_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEndpointsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.EndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_ListEndpointsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.EndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_ListEndpointsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListEndpointsResponse.class,
com.google.cloud.aiplatform.v1.ListEndpointsResponse.Builder.class);
}
public static final int ENDPOINTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.aiplatform.v1.Endpoint> endpoints_;
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.aiplatform.v1.Endpoint> getEndpointsList() {
return endpoints_;
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.EndpointOrBuilder>
getEndpointsOrBuilderList() {
return endpoints_;
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
@java.lang.Override
public int getEndpointsCount() {
return endpoints_.size();
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.Endpoint getEndpoints(int index) {
return endpoints_.get(index);
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.EndpointOrBuilder getEndpointsOrBuilder(int index) {
return endpoints_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* Pass to
* [ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve the next page of results.
* Pass to
* [ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token]
* to obtain that page.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < endpoints_.size(); i++) {
output.writeMessage(1, endpoints_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < endpoints_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, endpoints_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ListEndpointsResponse)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ListEndpointsResponse other =
(com.google.cloud.aiplatform.v1.ListEndpointsResponse) obj;
if (!getEndpointsList().equals(other.getEndpointsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEndpointsCount() > 0) {
hash = (37 * hash) + ENDPOINTS_FIELD_NUMBER;
hash = (53 * hash) + getEndpointsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ListEndpointsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListEndpointsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [EndpointService.ListEndpoints][google.cloud.aiplatform.v1.EndpointService.ListEndpoints].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ListEndpointsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListEndpointsResponse)
com.google.cloud.aiplatform.v1.ListEndpointsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.EndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_ListEndpointsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.EndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_ListEndpointsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ListEndpointsResponse.class,
com.google.cloud.aiplatform.v1.ListEndpointsResponse.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ListEndpointsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (endpointsBuilder_ == null) {
endpoints_ = java.util.Collections.emptyList();
} else {
endpoints_ = null;
endpointsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.EndpointServiceProto
.internal_static_google_cloud_aiplatform_v1_ListEndpointsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListEndpointsResponse getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ListEndpointsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListEndpointsResponse build() {
com.google.cloud.aiplatform.v1.ListEndpointsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ListEndpointsResponse buildPartial() {
com.google.cloud.aiplatform.v1.ListEndpointsResponse result =
new com.google.cloud.aiplatform.v1.ListEndpointsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.ListEndpointsResponse result) {
if (endpointsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
endpoints_ = java.util.Collections.unmodifiableList(endpoints_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.endpoints_ = endpoints_;
} else {
result.endpoints_ = endpointsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.ListEndpointsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ListEndpointsResponse) {
return mergeFrom((com.google.cloud.aiplatform.v1.ListEndpointsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ListEndpointsResponse other) {
if (other == com.google.cloud.aiplatform.v1.ListEndpointsResponse.getDefaultInstance())
return this;
if (endpointsBuilder_ == null) {
if (!other.endpoints_.isEmpty()) {
if (endpoints_.isEmpty()) {
endpoints_ = other.endpoints_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEndpointsIsMutable();
endpoints_.addAll(other.endpoints_);
}
onChanged();
}
} else {
if (!other.endpoints_.isEmpty()) {
if (endpointsBuilder_.isEmpty()) {
endpointsBuilder_.dispose();
endpointsBuilder_ = null;
endpoints_ = other.endpoints_;
bitField0_ = (bitField0_ & ~0x00000001);
endpointsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getEndpointsFieldBuilder()
: null;
} else {
endpointsBuilder_.addAllMessages(other.endpoints_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.Endpoint m =
input.readMessage(
com.google.cloud.aiplatform.v1.Endpoint.parser(), extensionRegistry);
if (endpointsBuilder_ == null) {
ensureEndpointsIsMutable();
endpoints_.add(m);
} else {
endpointsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.Endpoint> endpoints_ =
java.util.Collections.emptyList();
private void ensureEndpointsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
endpoints_ = new java.util.ArrayList<com.google.cloud.aiplatform.v1.Endpoint>(endpoints_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.Endpoint,
com.google.cloud.aiplatform.v1.Endpoint.Builder,
com.google.cloud.aiplatform.v1.EndpointOrBuilder>
endpointsBuilder_;
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.Endpoint> getEndpointsList() {
if (endpointsBuilder_ == null) {
return java.util.Collections.unmodifiableList(endpoints_);
} else {
return endpointsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
public int getEndpointsCount() {
if (endpointsBuilder_ == null) {
return endpoints_.size();
} else {
return endpointsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
public com.google.cloud.aiplatform.v1.Endpoint getEndpoints(int index) {
if (endpointsBuilder_ == null) {
return endpoints_.get(index);
} else {
return endpointsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
public Builder setEndpoints(int index, com.google.cloud.aiplatform.v1.Endpoint value) {
if (endpointsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEndpointsIsMutable();
endpoints_.set(index, value);
onChanged();
} else {
endpointsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
public Builder setEndpoints(
int index, com.google.cloud.aiplatform.v1.Endpoint.Builder builderForValue) {
if (endpointsBuilder_ == null) {
ensureEndpointsIsMutable();
endpoints_.set(index, builderForValue.build());
onChanged();
} else {
endpointsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
public Builder addEndpoints(com.google.cloud.aiplatform.v1.Endpoint value) {
if (endpointsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEndpointsIsMutable();
endpoints_.add(value);
onChanged();
} else {
endpointsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of Endpoints in the requested page.
* </pre>
*
* <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
*/
public Builder addEndpoints(int index, com.google.cloud.aiplatform.v1.Endpoint value) {
if (endpointsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEndpointsIsMutable();
endpoints_.add(index, value);
onChanged();
} else {
endpointsBuilder_.addMessage(index, value);
}
return this;
}
    // NOTE(review): protoc-generated repeated-field accessors for "endpoints".
    // Each mutator operates in one of two modes: before getEndpointsFieldBuilder()
    // is first called, elements live in the plain list endpoints_; afterwards all
    // operations are delegated to the RepeatedFieldBuilderV3 endpointsBuilder_.
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public Builder addEndpoints(com.google.cloud.aiplatform.v1.Endpoint.Builder builderForValue) {
      if (endpointsBuilder_ == null) {
        ensureEndpointsIsMutable();
        endpoints_.add(builderForValue.build());
        onChanged();
      } else {
        endpointsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public Builder addEndpoints(
        int index, com.google.cloud.aiplatform.v1.Endpoint.Builder builderForValue) {
      if (endpointsBuilder_ == null) {
        ensureEndpointsIsMutable();
        endpoints_.add(index, builderForValue.build());
        onChanged();
      } else {
        endpointsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public Builder addAllEndpoints(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.Endpoint> values) {
      if (endpointsBuilder_ == null) {
        ensureEndpointsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, endpoints_);
        onChanged();
      } else {
        endpointsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public Builder clearEndpoints() {
      if (endpointsBuilder_ == null) {
        endpoints_ = java.util.Collections.emptyList();
        // Clear the has-bit for field 1 so the list is treated as unset again.
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        endpointsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public Builder removeEndpoints(int index) {
      if (endpointsBuilder_ == null) {
        ensureEndpointsIsMutable();
        endpoints_.remove(index);
        onChanged();
      } else {
        endpointsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Endpoint.Builder getEndpointsBuilder(int index) {
      // Forces creation of the field builder (switches the Builder into delegated mode).
      return getEndpointsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.EndpointOrBuilder getEndpointsOrBuilder(int index) {
      if (endpointsBuilder_ == null) {
        return endpoints_.get(index);
      } else {
        return endpointsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.aiplatform.v1.EndpointOrBuilder>
        getEndpointsOrBuilderList() {
      if (endpointsBuilder_ != null) {
        return endpointsBuilder_.getMessageOrBuilderList();
      } else {
        // Read-only view over the internal list; callers must not mutate it.
        return java.util.Collections.unmodifiableList(endpoints_);
      }
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Endpoint.Builder addEndpointsBuilder() {
      return getEndpointsFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.Endpoint.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.Endpoint.Builder addEndpointsBuilder(int index) {
      return getEndpointsFieldBuilder()
          .addBuilder(index, com.google.cloud.aiplatform.v1.Endpoint.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * List of Endpoints in the requested page.
     * </pre>
     *
     * <code>repeated .google.cloud.aiplatform.v1.Endpoint endpoints = 1;</code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.Endpoint.Builder>
        getEndpointsBuilderList() {
      return getEndpointsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3; ownership of the current list
    // transfers to the builder, and endpoints_ is nulled to make the handover explicit.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.Endpoint,
            com.google.cloud.aiplatform.v1.Endpoint.Builder,
            com.google.cloud.aiplatform.v1.EndpointOrBuilder>
        getEndpointsFieldBuilder() {
      if (endpointsBuilder_ == null) {
        endpointsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.Endpoint,
                com.google.cloud.aiplatform.v1.Endpoint.Builder,
                com.google.cloud.aiplatform.v1.EndpointOrBuilder>(
                endpoints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        endpoints_ = null;
      }
      return endpointsBuilder_;
    }
    // Holds either a java.lang.String or a com.google.protobuf.ByteString;
    // the getters below convert lazily and cache the converted form.
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        // Decode once and cache the String so later calls skip the UTF-8 conversion.
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Set the has-bit for field 2.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      // Reset to the default instance's value and clear the has-bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results.
     * Pass to
     * [ListEndpointsRequest.page_token][google.cloud.aiplatform.v1.ListEndpointsRequest.page_token]
     * to obtain that page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects bytes that are not valid UTF-8 (proto3 string field).
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling is delegated unchanged to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListEndpointsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListEndpointsResponse)
  // Eagerly-created singleton default instance for this message type.
  private static final com.google.cloud.aiplatform.v1.ListEndpointsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListEndpointsResponse();
  }
  public static com.google.cloud.aiplatform.v1.ListEndpointsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that reports every failure as InvalidProtocolBufferException,
  // attaching the partially-built message for diagnostics.
  private static final com.google.protobuf.Parser<ListEndpointsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListEndpointsResponse>() {
        @java.lang.Override
        public ListEndpointsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListEndpointsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListEndpointsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ListEndpointsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/graalpython | 37,064 | graalpython/com.oracle.graal.python/src/com/oracle/graal/python/builtins/objects/set/SetBuiltins.java | /*
* Copyright (c) 2017, 2025, Oracle and/or its affiliates.
* Copyright (c) 2014, Regents of the University of California
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this list of
* conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.oracle.graal.python.builtins.objects.set;
import static com.oracle.graal.python.nodes.BuiltinNames.J_ADD;
import static com.oracle.graal.python.nodes.BuiltinNames.J_SET;
import static com.oracle.graal.python.runtime.exception.PythonErrorType.TypeError;
import java.util.List;
import com.oracle.graal.python.PythonLanguage;
import com.oracle.graal.python.annotations.Builtin;
import com.oracle.graal.python.annotations.HashNotImplemented;
import com.oracle.graal.python.annotations.Slot;
import com.oracle.graal.python.annotations.Slot.SlotKind;
import com.oracle.graal.python.annotations.Slot.SlotSignature;
import com.oracle.graal.python.builtins.CoreFunctions;
import com.oracle.graal.python.builtins.Python3Core;
import com.oracle.graal.python.builtins.PythonBuiltinClassType;
import com.oracle.graal.python.builtins.PythonBuiltins;
import com.oracle.graal.python.builtins.objects.PNone;
import com.oracle.graal.python.builtins.objects.PNotImplemented;
import com.oracle.graal.python.builtins.objects.common.HashingCollectionNodes;
import com.oracle.graal.python.builtins.objects.common.HashingCollectionNodes.GetSetStorageNode;
import com.oracle.graal.python.builtins.objects.common.HashingStorage;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageAddAllToOther;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageClear;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageCopy;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageDiff;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageIntersect;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageLen;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStoragePop;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageSetItem;
import com.oracle.graal.python.builtins.objects.common.HashingStorageNodes.HashingStorageXor;
import com.oracle.graal.python.builtins.objects.common.PHashingCollection;
import com.oracle.graal.python.builtins.objects.common.SequenceNodes;
import com.oracle.graal.python.builtins.objects.common.SequenceStorageNodes;
import com.oracle.graal.python.builtins.objects.dict.PDictView;
import com.oracle.graal.python.builtins.objects.type.TpSlots;
import com.oracle.graal.python.builtins.objects.type.TypeNodes;
import com.oracle.graal.python.lib.IteratorExhausted;
import com.oracle.graal.python.lib.PyIterNextNode;
import com.oracle.graal.python.lib.PyObjectGetIter;
import com.oracle.graal.python.nodes.ErrorMessages;
import com.oracle.graal.python.nodes.PGuards;
import com.oracle.graal.python.nodes.PRaiseNode;
import com.oracle.graal.python.nodes.SpecialMethodNames;
import com.oracle.graal.python.nodes.function.PythonBuiltinBaseNode;
import com.oracle.graal.python.nodes.function.PythonBuiltinNode;
import com.oracle.graal.python.nodes.function.builtins.PythonBinaryBuiltinNode;
import com.oracle.graal.python.nodes.function.builtins.PythonUnaryBuiltinNode;
import com.oracle.graal.python.nodes.object.GetClassNode.GetPythonObjectClassNode;
import com.oracle.graal.python.runtime.PythonOptions;
import com.oracle.graal.python.runtime.exception.PythonErrorType;
import com.oracle.graal.python.runtime.object.PFactory;
import com.oracle.graal.python.runtime.sequence.PSequence;
import com.oracle.graal.python.runtime.sequence.storage.SequenceStorage;
import com.oracle.truffle.api.dsl.Bind;
import com.oracle.truffle.api.dsl.Cached;
import com.oracle.truffle.api.dsl.Cached.Exclusive;
import com.oracle.truffle.api.dsl.Cached.Shared;
import com.oracle.truffle.api.dsl.Fallback;
import com.oracle.truffle.api.dsl.GenerateInline;
import com.oracle.truffle.api.dsl.GenerateNodeFactory;
import com.oracle.truffle.api.dsl.GenerateUncached;
import com.oracle.truffle.api.dsl.Idempotent;
import com.oracle.truffle.api.dsl.ImportStatic;
import com.oracle.truffle.api.dsl.NeverDefault;
import com.oracle.truffle.api.dsl.NodeFactory;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.profiles.InlinedConditionProfile;
/**
* binary operations are implemented in {@link BaseSetBuiltins}
*/
@CoreFunctions(extendClasses = PythonBuiltinClassType.PSet)
@HashNotImplemented
public final class SetBuiltins extends PythonBuiltins {
    // Type slots for the builtin set type; SetBuiltinsSlotsGen is presumably
    // generated from the @Slot-annotated classes in this file -- TODO confirm.
    public static final TpSlots SLOTS = SetBuiltinsSlotsGen.SLOTS;
    @Override
    public void initialize(Python3Core core) {
        super.initialize(core);
    }
    @Override
    protected List<? extends NodeFactory<? extends PythonBuiltinBaseNode>> getNodeFactories() {
        // Factories are produced by the Truffle DSL annotation processor.
        return SetBuiltinsFactory.getFactories();
    }
    // set([iterable]) -- tp_new: allocates the (empty) set object. The constructor
    // arguments are ignored here; initialization from an iterable happens in
    // InitNode (tp_init) below.
    @Slot(value = SlotKind.tp_new, isComplex = true)
    @SlotSignature(name = J_SET, minNumOfPositionalArgs = 1, takesVarArgs = true, takesVarKeywordArgs = true)
    @GenerateNodeFactory
    public abstract static class SetNode extends PythonBuiltinNode {
        // Fast path: exact builtin `set` class.
        @Specialization(guards = "isBuiltinSet(cls)")
        public PSet setEmpty(@SuppressWarnings("unused") Object cls, @SuppressWarnings("unused") Object arg,
                        @Bind PythonLanguage language) {
            return PFactory.createSet(language);
        }
        // Subclass of set: allocate using the subclass' instance shape.
        @Fallback
        public PSet setEmpty(Object cls, @SuppressWarnings("unused") Object arg,
                        @Bind PythonLanguage language,
                        @Cached TypeNodes.GetInstanceShape getInstanceShape) {
            return PFactory.createSet(language, cls, getInstanceShape.execute(cls));
        }
        protected static boolean isBuiltinSet(Object cls) {
            return cls == PythonBuiltinClassType.PSet;
        }
    }
    // tp_init: set.__init__([iterable]).
    @Slot(value = SlotKind.tp_init, isComplex = true)
    @SlotSignature(name = "set", minNumOfPositionalArgs = 1, maxNumOfPositionalArgs = 2)
    @GenerateNodeFactory
    @ImportStatic(PGuards.class)
    public abstract static class InitNode extends PythonBuiltinNode {
        // No argument: empty the set's existing storage.
        @Specialization(guards = "isNoValue(iterable)")
        static PNone doNoValue(PSet self, @SuppressWarnings("unused") PNone iterable,
                        @Bind Node inliningTarget,
                        @Cached HashingStorageClear clearNode) {
            HashingStorage newStorage = clearNode.execute(inliningTarget, self.getDictStorage());
            self.setDictStorage(newStorage);
            return PNone.NONE;
        }
        // With an iterable: replace storage with a fresh clone built from the iterable.
        @Specialization(guards = {"!isNoValue(iterable)"})
        static PNone doGeneric(VirtualFrame frame, PSet self, Object iterable,
                        @Bind Node inliningTarget,
                        @Cached HashingCollectionNodes.GetClonedHashingStorageNode getHashingStorageNode) {
            HashingStorage storage = getHashingStorageNode.getForSets(frame, inliningTarget, iterable);
            self.setDictStorage(storage);
            return PNone.NONE;
        }
        // Any other combination (e.g. receiver is not a PSet): TypeError.
        @Fallback
        static PNone fail(@SuppressWarnings("unused") VirtualFrame frame, @SuppressWarnings("unused") Object self, Object iterable,
                        @Bind Node inliningTarget) {
            throw PRaiseNode.raiseStatic(inliningTarget, TypeError, ErrorMessages.SET_DOES_NOT_SUPPORT_ITERABLE_OBJ, iterable);
        }
    }
@Builtin(name = "copy", minNumOfPositionalArgs = 1)
@GenerateNodeFactory
public abstract static class CopyNode extends PythonBuiltinNode {
@Specialization
static PSet doSet(PSet self,
@Bind Node inliningTarget,
@Cached HashingStorageCopy copyNode,
@Bind PythonLanguage language) {
return PFactory.createSet(language, copyNode.execute(inliningTarget, self.getDictStorage()));
}
}
@Builtin(name = "clear", minNumOfPositionalArgs = 1)
@GenerateNodeFactory
public abstract static class ClearNode extends PythonUnaryBuiltinNode {
@Specialization
public static Object clear(PSet self,
@Bind Node inliningTarget,
@Cached HashingStorageClear clearNode) {
HashingStorage newStorage = clearNode.execute(inliningTarget, self.getDictStorage());
self.setDictStorage(newStorage);
return PNone.NONE;
}
}
    // set.add(o): insert a single element; returns None.
    @Builtin(name = J_ADD, minNumOfPositionalArgs = 2)
    @GenerateNodeFactory
    public abstract static class AddNode extends PythonBinaryBuiltinNode {
        @Specialization
        public static Object add(VirtualFrame frame, PSet self, Object o,
                        @Cached SetNodes.AddNode addNode) {
            addNode.execute(frame, self, o);
            return PNone.NONE;
        }
    }
    // nb_inplace_or: set |= other, for set-typed right operands only.
    @Slot(value = SlotKind.nb_inplace_or, isComplex = true)
    @GenerateNodeFactory
    public abstract static class IOrNode extends PythonBinaryBuiltinNode {
        @Specialization
        Object doSet(VirtualFrame frame, PSet self, PBaseSet other,
                        @Bind Node inliningTarget,
                        @Cached HashingStorageAddAllToOther addAllToOther) {
            addAllToOther.execute(frame, inliningTarget, other.getDictStorage(), self);
            return self;
        }
        // Non-set operand: let the interpreter try the reflected operation.
        @SuppressWarnings("unused")
        @Fallback
        Object doOr(Object self, Object other) {
            return PNotImplemented.NOT_IMPLEMENTED;
        }
    }
@Builtin(name = "union", minNumOfPositionalArgs = 1, takesVarArgs = true)
@GenerateNodeFactory
abstract static class UnionNode extends PythonBuiltinNode {
@Specialization(guards = {"args.length == len", "args.length < 32"}, limit = "3")
static PBaseSet doCached(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Cached("args.length") int len,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageCopy copyNode,
@Shared @Cached HashingStorageAddAllToOther addAllToOther,
@Bind PythonLanguage language) {
HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
for (int i = 0; i < len; i++) {
result = addAllToOther.execute(frame, inliningTarget, getSetStorageNode.execute(frame, inliningTarget, args[i]), result);
}
return PFactory.createSet(language, result);
}
@Specialization(replaces = "doCached")
static PBaseSet doGeneric(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageCopy copyNode,
@Shared @Cached HashingStorageAddAllToOther addAllToOther,
@Bind PythonLanguage language) {
HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
for (int i = 0; i < args.length; i++) {
result = addAllToOther.execute(frame, inliningTarget, getSetStorageNode.execute(frame, inliningTarget, args[i]), result);
}
return PFactory.createSet(language, result);
}
}
    // Adds all elements of a single `other` argument into `collection`, choosing
    // the cheapest path per argument type (hashing collection, dict keys view,
    // builtin sequence, or generic iterable).
    @ImportStatic({PGuards.class, PythonOptions.class})
    @GenerateUncached
    @GenerateInline(false) // footprint reduction 108 -> 90
    public abstract static class UpdateSingleNode extends Node {
        public abstract void execute(VirtualFrame frame, PHashingCollection collection, Object other);
        // other is a set/dict-like collection: bulk-add its storage.
        @Specialization
        static void update(VirtualFrame frame, PHashingCollection collection, PHashingCollection other,
                        @Bind Node inliningTarget,
                        @Shared("addAll") @Cached HashingStorageAddAllToOther addAllToOther) {
            HashingStorage dictStorage = other.getDictStorage();
            addAllToOther.execute(frame, inliningTarget, dictStorage, collection);
        }
        // other is dict.keys(): bulk-add the dict's wrapped storage directly.
        @Specialization
        static void update(VirtualFrame frame, PHashingCollection collection, PDictView.PDictKeysView other,
                        @Bind Node inliningTarget,
                        @Shared("addAll") @Cached HashingStorageAddAllToOther addAllToOther) {
            HashingStorage dictStorage = other.getWrappedStorage();
            addAllToOther.execute(frame, inliningTarget, dictStorage, collection);
        }
        @Idempotent
        static boolean isBuiltinSequence(Node inliningTarget, Object other, GetPythonObjectClassNode getClassNode) {
            return other instanceof PSequence && getClassNode.execute(inliningTarget, (PSequence) other) instanceof PythonBuiltinClassType;
        }
        // other is a builtin sequence: iterate by index, no iterator allocation.
        @Specialization(guards = "isBuiltinSequence(inliningTarget, other, getClassNode)", limit = "1")
        static void doBuiltin(VirtualFrame frame, PHashingCollection collection, @SuppressWarnings("unused") PSequence other,
                        @Bind Node inliningTarget,
                        @SuppressWarnings("unused") @Exclusive @Cached GetPythonObjectClassNode getClassNode,
                        @Cached SequenceNodes.GetSequenceStorageNode getSequenceStorageNode,
                        @Cached SequenceStorageNodes.GetItemScalarNode getItemScalarNode,
                        @Exclusive @Cached HashingStorageSetItem setStorageItem) {
            SequenceStorage sequenceStorage = getSequenceStorageNode.execute(inliningTarget, other);
            int length = sequenceStorage.length();
            HashingStorage curStorage = collection.getDictStorage();
            for (int i = 0; i < length; i++) {
                Object key = getItemScalarNode.execute(inliningTarget, sequenceStorage, i);
                // Elements are stored as keys; PNone.NONE is the placeholder value.
                curStorage = setStorageItem.execute(frame, inliningTarget, curStorage, key, PNone.NONE);
            }
            collection.setDictStorage(curStorage);
        }
        // Generic iterable: drive the iterator protocol until exhaustion.
        @Specialization(guards = {"!isPHashingCollection(other)", "!isDictKeysView(other)", "!isBuiltinSequence(inliningTarget, other, getClassNode)"}, limit = "1")
        static void doIterable(VirtualFrame frame, PHashingCollection collection, Object other,
                        @Bind Node inliningTarget,
                        @SuppressWarnings("unused") @Exclusive @Cached GetPythonObjectClassNode getClassNode,
                        @Cached PyObjectGetIter getIter,
                        @Cached PyIterNextNode nextNode,
                        @Exclusive @Cached HashingStorageSetItem setStorageItem) {
            HashingStorage curStorage = collection.getDictStorage();
            Object iterator = getIter.execute(frame, inliningTarget, other);
            while (true) {
                Object key;
                try {
                    key = nextNode.execute(frame, inliningTarget, iterator);
                } catch (IteratorExhausted e) {
                    // End of iteration: publish the accumulated storage and stop.
                    collection.setDictStorage(curStorage);
                    return;
                }
                curStorage = setStorageItem.execute(frame, inliningTarget, curStorage, key, PNone.NONE);
            }
        }
        @NeverDefault
        public static UpdateSingleNode create() {
            return SetBuiltinsFactory.UpdateSingleNodeGen.create();
        }
        public static UpdateSingleNode getUncached() {
            return SetBuiltinsFactory.UpdateSingleNodeGen.getUncached();
        }
    }
@Builtin(name = "update", minNumOfPositionalArgs = 1, takesVarArgs = true)
@GenerateNodeFactory
public abstract static class UpdateNode extends PythonBuiltinNode {
@Specialization(guards = "isNoValue(other)")
@SuppressWarnings("unused")
static PNone doSet(VirtualFrame frame, PSet self, PNone other) {
return PNone.NONE;
}
@Specialization(guards = "args.length == 1")
static PNone doCached(VirtualFrame frame, PSet self, Object[] args,
@Cached UpdateSingleNode update) {
update.execute(frame, self, args[0]);
return PNone.NONE;
}
@Specialization(guards = {"args.length == len", "args.length < 32"}, limit = "3")
static PNone doCached(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Cached("args.length") int len,
@Shared @Cached GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageAddAllToOther addAllToOther) {
HashingStorage storage = self.getDictStorage();
for (int i = 0; i < len; i++) {
storage = addAllToOther.execute(frame, inliningTarget, getSetStorageNode.execute(frame, inliningTarget, args[i]), storage);
}
self.setDictStorage(storage);
return PNone.NONE;
}
@Specialization(replaces = "doCached")
static PNone doSet(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageAddAllToOther addAllToOther) {
HashingStorage storage = self.getDictStorage();
for (Object o : args) {
storage = addAllToOther.execute(frame, inliningTarget, getSetStorageNode.execute(frame, inliningTarget, o), storage);
}
self.setDictStorage(storage);
return PNone.NONE;
}
static boolean isOther(Object arg) {
return !(PGuards.isNoValue(arg) || arg instanceof Object[]);
}
@Specialization(guards = "isOther(other)")
static PNone doSet(VirtualFrame frame, PSet self, Object other,
@Bind Node inliningTarget,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageAddAllToOther addAllToOther) {
addAllToOther.execute(frame, inliningTarget, getSetStorageNode.execute(frame, inliningTarget, other), self);
return PNone.NONE;
}
}
    // nb_inplace_and: set &= other, for set-typed right operands only.
    @Slot(value = SlotKind.nb_inplace_and, isComplex = true)
    @GenerateNodeFactory
    public abstract static class IAndNode extends PythonBinaryBuiltinNode {
        @Specialization
        static PBaseSet doPBaseSet(VirtualFrame frame, PSet left, PBaseSet right,
                        @Bind Node inliningTarget,
                        @Cached HashingStorageLen lenNode,
                        @Cached InlinedConditionProfile swapProfile,
                        @Cached HashingStorageIntersect intersectNode) {
            HashingStorage storage1 = left.getDictStorage();
            HashingStorage storage2 = right.getDictStorage();
            // Try to minimize the number of __eq__ calls
            // (after the swap, storage2 is the smaller of the two).
            if (swapProfile.profile(inliningTarget, lenNode.execute(inliningTarget, storage2) > lenNode.execute(inliningTarget, storage1))) {
                HashingStorage tmp = storage1;
                storage1 = storage2;
                storage2 = tmp;
            }
            HashingStorage storage = intersectNode.execute(frame, inliningTarget, storage2, storage1);
            left.setDictStorage(storage);
            return left;
        }
        // Non-set operand: let the interpreter try the reflected operation.
        @SuppressWarnings("unused")
        @Fallback
        Object doAnd(Object self, Object other) {
            return PNotImplemented.NOT_IMPLEMENTED;
        }
    }
@Builtin(name = "intersection", minNumOfPositionalArgs = 1, takesVarArgs = true)
@GenerateNodeFactory
public abstract static class IntersectNode extends PythonBuiltinNode {
@Specialization(guards = "isNoValue(other)")
Object doSet(@SuppressWarnings("unused") VirtualFrame frame, PSet self, @SuppressWarnings("unused") PNone other,
@Bind Node inliningTarget,
@Shared @Cached HashingStorageCopy copyNode) {
HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
return createResult(self, result);
}
@Specialization(guards = {"args.length == len", "args.length < 32"}, limit = "3")
Object doCached(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Cached("args.length") int len,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageCopy copyNode,
@Shared @Cached HashingStorageIntersect intersectNode) {
HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
for (int i = 0; i < len; i++) {
result = intersectNode.execute(frame, inliningTarget, result, getSetStorageNode.execute(frame, inliningTarget, args[i]));
}
return createResult(self, result);
}
@Specialization(replaces = "doCached")
Object doGeneric(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageCopy copyNode,
@Shared @Cached HashingStorageIntersect intersectNode) {
HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
for (int i = 0; i < args.length; i++) {
result = intersectNode.execute(frame, inliningTarget, result, getSetStorageNode.execute(frame, inliningTarget, args[i]));
}
return createResult(self, result);
}
static boolean isOther(Object arg) {
return !(PGuards.isNoValue(arg) || arg instanceof Object[]);
}
@Specialization(guards = "isOther(other)")
Object doSet(VirtualFrame frame, PSet self, Object other,
@Bind Node inliningTarget,
@Shared @Cached GetSetStorageNode getSetStorageNode,
@Shared @Cached HashingStorageCopy copyNode,
@Shared @Cached HashingStorageIntersect intersectNode,
@Bind PythonLanguage language) {
HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
result = intersectNode.execute(frame, inliningTarget, result, getSetStorageNode.execute(frame, inliningTarget, other));
return PFactory.createSet(language, result);
}
protected Object createResult(PSet self, HashingStorage result) {
return PFactory.createSet(PythonLanguage.get(this), result);
}
}
@Builtin(name = "intersection_update", minNumOfPositionalArgs = 1, takesVarArgs = true)
@GenerateNodeFactory
@ImportStatic({SpecialMethodNames.class})
public abstract static class IntersectUpdateNode extends IntersectNode {
protected Object createResult(PSet self, HashingStorage result) {
// In order to be compatible w.r.t. __eq__ calls we cannot reuse self storage
self.setDictStorage(result);
return PNone.NONE;
}
}
    // nb_inplace_xor: set ^= other, for set-typed right operands only.
    @Slot(value = SlotKind.nb_inplace_xor, isComplex = true)
    @GenerateNodeFactory
    public abstract static class IXorNode extends PythonBinaryBuiltinNode {
        @Specialization
        static Object doSet(VirtualFrame frame, PSet self, PBaseSet other,
                        @Bind Node inliningTarget,
                        @Cached HashingStorageXor xorNode) {
            self.setDictStorage(xorNode.execute(frame, inliningTarget, self.getDictStorage(), other.getDictStorage()));
            return self;
        }
        // Non-set operand: let the interpreter try the reflected operation.
        @SuppressWarnings("unused")
        @Fallback
        Object doOr(Object self, Object other) {
            return PNotImplemented.NOT_IMPLEMENTED;
        }
    }
@Builtin(name = "symmetric_difference", minNumOfPositionalArgs = 2)
@GenerateNodeFactory
public abstract static class SymmetricDifferenceNode extends PythonBuiltinNode {
@Specialization
static PSet doSet(VirtualFrame frame, PSet self, Object other,
@Bind Node inliningTarget,
@Cached GetSetStorageNode getHashingStorage,
@Cached HashingStorageXor xorNode,
@Bind PythonLanguage language) {
HashingStorage result = xorNode.execute(frame, inliningTarget, self.getDictStorage(), getHashingStorage.execute(frame, inliningTarget, other));
return PFactory.createSet(language, result);
}
}
@Builtin(name = "symmetric_difference_update", minNumOfPositionalArgs = 1, takesVarArgs = true)
@GenerateNodeFactory
public abstract static class SymmetricDifferenceUpdateNode extends PythonBuiltinNode {
@Specialization(guards = "isNoValue(other)")
@SuppressWarnings("unused")
static PNone doSet(VirtualFrame frame, PSet self, PNone other) {
return PNone.NONE;
}
@Specialization(guards = {"args.length == len", "args.length < 32"}, limit = "3")
static PNone doCached(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Cached("args.length") int len,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getHashingStorage,
@Shared @Cached HashingStorageXor xorNode) {
HashingStorage result = self.getDictStorage();
for (int i = 0; i < len; i++) {
result = xorNode.execute(frame, inliningTarget, result, getHashingStorage.execute(frame, inliningTarget, args[i]));
}
self.setDictStorage(result);
return PNone.NONE;
}
@Specialization(replaces = "doCached")
static PNone doSetArgs(VirtualFrame frame, PSet self, Object[] args,
@Bind Node inliningTarget,
@Shared @Cached GetSetStorageNode getHashingStorage,
@Shared @Cached HashingStorageXor xorNode) {
HashingStorage result = self.getDictStorage();
for (Object o : args) {
result = xorNode.execute(frame, inliningTarget, result, getHashingStorage.execute(frame, inliningTarget, o));
}
self.setDictStorage(result);
return PNone.NONE;
}
static boolean isOther(Object arg) {
return !(PGuards.isNoValue(arg) || arg instanceof Object[]);
}
@Specialization(guards = "isOther(other)")
static PNone doSetOther(VirtualFrame frame, PSet self, Object other,
@Bind Node inliningTarget,
@Shared @Cached HashingCollectionNodes.GetSetStorageNode getHashingStorage,
@Shared @Cached HashingStorageXor xorNode) {
HashingStorage result = xorNode.execute(frame, inliningTarget, self.getDictStorage(), getHashingStorage.execute(frame, inliningTarget, other));
self.setDictStorage(result);
return PNone.NONE;
}
}
    // nb_inplace_subtract: set -= other, for set-typed right operands only.
    @Slot(value = SlotKind.nb_inplace_subtract, isComplex = true)
    @GenerateNodeFactory
    abstract static class ISubNode extends PythonBinaryBuiltinNode {
        @Specialization
        static PBaseSet doPBaseSet(VirtualFrame frame, PSet left, PBaseSet right,
                        @Bind Node inliningTarget,
                        @Cached HashingStorageDiff diffNode) {
            HashingStorage storage = diffNode.execute(frame, inliningTarget, left.getDictStorage(), right.getDictStorage());
            left.setDictStorage(storage);
            return left;
        }
        // Non-set operand: let the interpreter try the reflected operation.
        @SuppressWarnings("unused")
        @Fallback
        Object doSub(Object self, Object other) {
            return PNotImplemented.NOT_IMPLEMENTED;
        }
    }
    // set.difference(*others): returns a NEW set containing elements of self that are
    // in none of the arguments; self's own storage is never mutated (copied first).
    @Builtin(name = "difference", minNumOfPositionalArgs = 1, takesVarArgs = true)
    @GenerateNodeFactory
    public abstract static class DifferenceNode extends PythonBuiltinNode {

        // No argument: the difference is simply a shallow copy of self.
        @Specialization(guards = "isNoValue(other)")
        static PSet doSet(@SuppressWarnings("unused") VirtualFrame frame, PSet self, @SuppressWarnings("unused") PNone other,
                        @Bind PythonLanguage language) {
            return PFactory.createSet(language, self.getDictStorage());
        }

        // Fast path with args.length cached as a constant loop bound (cap 32, limit 3).
        @Specialization(guards = {"args.length == len", "args.length < 32"}, limit = "3")
        static PBaseSet doCached(VirtualFrame frame, PSet self, Object[] args,
                        @Bind Node inliningTarget,
                        @Cached("args.length") int len,
                        @Shared @Cached HashingCollectionNodes.GetSetStorageNode getSetStorageNode,
                        @Shared @Cached HashingStorageCopy copyNode,
                        @Shared @Cached HashingStorageDiff diffNode,
                        @Bind PythonLanguage language) {
            // Copy first so the repeated diffs never touch self's storage.
            HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
            for (int i = 0; i < len; i++) {
                result = diffNode.execute(frame, inliningTarget, result, getSetStorageNode.execute(frame, inliningTarget, args[i]));
            }
            return PFactory.createSet(language, result);
        }

        // Generic varargs path; replaces doCached once its cache limit is exceeded.
        @Specialization(replaces = "doCached")
        static PBaseSet doGeneric(VirtualFrame frame, PSet self, Object[] args,
                        @Bind Node inliningTarget,
                        @Shared @Cached GetSetStorageNode getSetStorageNode,
                        @Shared @Cached HashingStorageCopy copyNode,
                        @Shared @Cached HashingStorageDiff diffNode,
                        @Bind PythonLanguage language) {
            HashingStorage result = copyNode.execute(inliningTarget, self.getDictStorage());
            for (int i = 0; i < args.length; i++) {
                result = diffNode.execute(frame, inliningTarget, result, getSetStorageNode.execute(frame, inliningTarget, args[i]));
            }
            return PFactory.createSet(language, result);
        }

        // DSL guard: a single argument that is neither missing nor the varargs array.
        static boolean isOther(Object arg) {
            return !(PGuards.isNoValue(arg) || arg instanceof Object[]);
        }

        // Single iterable argument: one diff directly against self's storage (the diff
        // node produces a fresh storage, so no explicit copy is needed here).
        @Specialization(guards = "isOther(other)")
        static PSet doSet(VirtualFrame frame, PSet self, Object other,
                        @Bind Node inliningTarget,
                        @Shared @Cached GetSetStorageNode getSetStorageNode,
                        @Shared @Cached HashingStorageDiff diffNode,
                        @Bind PythonLanguage language) {
            HashingStorage result = diffNode.execute(frame, inliningTarget, self.getDictStorage(), getSetStorageNode.execute(frame, inliningTarget, other));
            return PFactory.createSet(language, result);
        }
    }
    // set.difference_update(*others): removes from self, in place, every element that
    // appears in any argument; always returns None.
    @Builtin(name = "difference_update", minNumOfPositionalArgs = 1, takesVarArgs = true, declaresExplicitSelf = true)
    @GenerateNodeFactory
    public abstract static class DifferenceUpdateNode extends PythonBuiltinNode {

        // No argument given: nothing to remove.
        @Specialization(guards = "isNoValue(other)")
        @SuppressWarnings("unused")
        static PNone doNone(VirtualFrame frame, PSet self, PNone other) {
            return PNone.NONE;
        }

        // Fast path with args.length cached as a constant loop bound (cap 32, limit 3).
        @Specialization(guards = {"args.length == len", "args.length < 32"}, limit = "3")
        static PNone doCached(VirtualFrame frame, PSet self, Object[] args,
                        @Bind Node inliningTarget,
                        @Cached("args.length") int len,
                        @Shared @Cached GetSetStorageNode getHashingStorage,
                        @Shared @Cached HashingStorageDiff diffNode) {
            HashingStorage result = self.getDictStorage();
            for (int i = 0; i < len; i++) {
                result = diffNode.execute(frame, inliningTarget, result, getHashingStorage.execute(frame, inliningTarget, args[i]));
            }
            self.setDictStorage(result);
            return PNone.NONE;
        }

        // Generic varargs path; replaces doCached once its cache limit is exceeded.
        @Specialization(replaces = "doCached")
        static PNone doSet(VirtualFrame frame, PSet self, Object[] args,
                        @Bind Node inliningTarget,
                        @Shared @Cached HashingCollectionNodes.GetSetStorageNode getHashingStorage,
                        @Shared @Cached HashingStorageDiff diffNode) {
            HashingStorage result = self.getDictStorage();
            for (Object o : args) {
                result = diffNode.execute(frame, inliningTarget, result, getHashingStorage.execute(frame, inliningTarget, o));
            }
            self.setDictStorage(result);
            return PNone.NONE;
        }

        // Single iterable argument: one diff against that argument's storage.
        // NOTE(review): unlike SymmetricDifferenceUpdateNode this specialization has no
        // "isOther(other)" guard and presumably relies on DSL declaration order to keep
        // PNone/Object[] out of this path — confirm that is intentional.
        @Specialization
        static PNone doSet(VirtualFrame frame, PSet self, Object other,
                        @Bind Node inliningTarget,
                        @Shared @Cached HashingCollectionNodes.GetSetStorageNode getHashingStorage,
                        @Shared @Cached HashingStorageDiff diffNode) {
            HashingStorage result = diffNode.execute(frame, inliningTarget, self.getDictStorage(), getHashingStorage.execute(frame, inliningTarget, other));
            self.setDictStorage(result);
            return PNone.NONE;
        }
    }
    // set.remove(key): removes key from the set; raises KeyError(key) if absent
    // (in contrast to discard, which is silent on a missing key).
    @Builtin(name = "remove", minNumOfPositionalArgs = 2)
    @GenerateNodeFactory
    abstract static class RemoveNode extends PythonBinaryBuiltinNode {
        @Specialization
        static Object remove(VirtualFrame frame, PSet self, Object key,
                        @Bind Node inliningTarget,
                        @Cached com.oracle.graal.python.builtins.objects.set.SetNodes.DiscardNode discardNode,
                        @Cached PRaiseNode raiseNode) {
            // DiscardNode returns false when the key was not present.
            if (!discardNode.execute(frame, self, key)) {
                throw raiseNode.raise(inliningTarget, PythonErrorType.KeyError, new Object[]{key});
            }
            return PNone.NONE;
        }
    }
    // set.discard(key): removes key from the set if present; silently does nothing
    // when the key is absent. Always returns None.
    @Builtin(name = "discard", minNumOfPositionalArgs = 2)
    @GenerateNodeFactory
    abstract static class DiscardNode extends PythonBinaryBuiltinNode {
        @Specialization
        static Object discard(VirtualFrame frame, PSet self, Object key,
                        @Cached com.oracle.graal.python.builtins.objects.set.SetNodes.DiscardNode discardNode) {
            // Return value of the helper is deliberately ignored: missing key is not an error.
            discardNode.execute(frame, self, key);
            return PNone.NONE;
        }
    }
    // set.pop(): removes and returns an arbitrary element; raises
    // KeyError("pop from an empty set") when the set is empty.
    @Builtin(name = "pop", minNumOfPositionalArgs = 1, maxNumOfPositionalArgs = 1)
    @GenerateNodeFactory
    public abstract static class PopNode extends PythonUnaryBuiltinNode {
        @Specialization
        static Object remove(PSet self,
                        @Bind Node inliningTarget,
                        @Cached HashingStoragePop popNode,
                        @Cached PRaiseNode raiseNode) {
            // popNode returns null for an empty storage; otherwise result[0] is the
            // popped element.
            Object[] result = popNode.execute(inliningTarget, self.getDictStorage(), self);
            if (result != null) {
                return result[0];
            }
            throw raiseNode.raise(inliningTarget, PythonErrorType.KeyError, ErrorMessages.POP_FROM_EMPTY_SET);
        }
    }
}
|
googleapis/google-cloud-java | 36,794 | java-dialogflow/google-cloud-dialogflow/src/test/java/com/google/cloud/dialogflow/v2/DocumentsClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.v2;
import static com.google.cloud.dialogflow.v2.DocumentsClient.ListDocumentsPagedResponse;
import static com.google.cloud.dialogflow.v2.DocumentsClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.rpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
// Unit tests for DocumentsClient, produced by gapic-generator-java and run against
// in-process gRPC mock services.
@Generated("by gapic-generator-java")
public class DocumentsClientTest {
  // Mock gRPC backends shared by all tests; started once in startStaticServer().
  private static MockDocuments mockDocuments;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test channel provider and client, recreated in setUp().
  private LocalChannelProvider channelProvider;
  private DocumentsClient client;
  // Starts the in-process mock gRPC services once for the whole test class.
  @BeforeClass
  public static void startStaticServer() {
    mockDocuments = new MockDocuments();
    mockLocations = new MockLocations();
    mockServiceHelper =
        new MockServiceHelper(
            // Random server name avoids collisions with other in-process servers.
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockDocuments, mockLocations));
    mockServiceHelper.start();
  }
  // Shuts the shared mock server down after all tests have run.
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
  // Resets the mocks and builds a fresh DocumentsClient over the local channel
  // before each test (no real credentials needed against the mock).
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    DocumentsSettings settings =
        DocumentsSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = DocumentsClient.create(settings);
  }
  // Closes the per-test client so its channel is released.
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  // listDocuments(KnowledgeBaseName): verifies the mocked single-page response is
  // surfaced through the paged wrapper and the request carries the parent name.
  @Test
  public void listDocumentsTest() throws Exception {
    Document responsesElement = Document.newBuilder().build();
    ListDocumentsResponse expectedResponse =
        ListDocumentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDocuments(Arrays.asList(responsesElement))
            .build();
    mockDocuments.addResponse(expectedResponse);

    KnowledgeBaseName parent =
        KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");

    ListDocumentsPagedResponse pagedListResponse = client.listDocuments(parent);

    List<Document> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDocumentsRequest actualRequest = ((ListDocumentsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // listDocuments(KnowledgeBaseName): INVALID_ARGUMENT from the mock must surface
  // as InvalidArgumentException.
  @Test
  public void listDocumentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      KnowledgeBaseName parent =
          KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
      client.listDocuments(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // listDocuments(String): same as listDocumentsTest but via the raw-string parent
  // overload.
  @Test
  public void listDocumentsTest2() throws Exception {
    Document responsesElement = Document.newBuilder().build();
    ListDocumentsResponse expectedResponse =
        ListDocumentsResponse.newBuilder()
            .setNextPageToken("")
            .addAllDocuments(Arrays.asList(responsesElement))
            .build();
    mockDocuments.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListDocumentsPagedResponse pagedListResponse = client.listDocuments(parent);

    List<Document> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getDocumentsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListDocumentsRequest actualRequest = ((ListDocumentsRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // listDocuments(String): error propagation for the raw-string parent overload.
  @Test
  public void listDocumentsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listDocuments(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getDocument(DocumentName): verifies the mocked Document is returned unchanged
  // and the request name matches the resource-name argument.
  @Test
  public void getDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    mockDocuments.addResponse(expectedResponse);

    DocumentName name =
        DocumentName.ofProjectKnowledgeBaseDocumentName(
            "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");

    Document actualResponse = client.getDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDocumentRequest actualRequest = ((GetDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // getDocument(DocumentName): INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void getDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      DocumentName name =
          DocumentName.ofProjectKnowledgeBaseDocumentName(
              "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
      client.getDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getDocument(String): same as getDocumentTest but via the raw-string name overload.
  @Test
  public void getDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    mockDocuments.addResponse(expectedResponse);

    String name = "name3373707";

    Document actualResponse = client.getDocument(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetDocumentRequest actualRequest = ((GetDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // getDocument(String): error propagation for the raw-string name overload.
  @Test
  public void getDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      String name = "name3373707";
      client.getDocument(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // createDocumentAsync(KnowledgeBaseName, Document): the mock returns a completed
  // long-running Operation whose response payload is unpacked into the Document.
  @Test
  public void createDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    KnowledgeBaseName parent =
        KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
    Document document = Document.newBuilder().build();

    Document actualResponse = client.createDocumentAsync(parent, document).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDocumentRequest actualRequest = ((CreateDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // createDocumentAsync(KnowledgeBaseName, Document): async failures are wrapped in
  // ExecutionException with InvalidArgumentException as the cause.
  @Test
  public void createDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      KnowledgeBaseName parent =
          KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]");
      Document document = Document.newBuilder().build();
      client.createDocumentAsync(parent, document).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // createDocumentAsync(String, Document): LRO success path via the raw-string
  // parent overload.
  @Test
  public void createDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    String parent = "parent-995424086";
    Document document = Document.newBuilder().build();

    Document actualResponse = client.createDocumentAsync(parent, document).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateDocumentRequest actualRequest = ((CreateDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // createDocumentAsync(String, Document): error propagation for the raw-string
  // parent overload.
  @Test
  public void createDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      String parent = "parent-995424086";
      Document document = Document.newBuilder().build();
      client.createDocumentAsync(parent, document).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // importDocumentsAsync(ImportDocumentsRequest): LRO success path; every request
  // field is echoed back and compared against the sent request.
  @Test
  public void importDocumentsTest() throws Exception {
    ImportDocumentsResponse expectedResponse =
        ImportDocumentsResponse.newBuilder().addAllWarnings(new ArrayList<Status>()).build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("importDocumentsTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    ImportDocumentsRequest request =
        ImportDocumentsRequest.newBuilder()
            .setParent(
                KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]")
                    .toString())
            .setDocumentTemplate(ImportDocumentTemplate.newBuilder().build())
            .setImportGcsCustomMetadata(true)
            .build();

    ImportDocumentsResponse actualResponse = client.importDocumentsAsync(request).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ImportDocumentsRequest actualRequest = ((ImportDocumentsRequest) actualRequests.get(0));

    Assert.assertEquals(request.getParent(), actualRequest.getParent());
    Assert.assertEquals(request.getGcsSource(), actualRequest.getGcsSource());
    Assert.assertEquals(request.getDocumentTemplate(), actualRequest.getDocumentTemplate());
    Assert.assertEquals(
        request.getImportGcsCustomMetadata(), actualRequest.getImportGcsCustomMetadata());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // importDocumentsAsync(ImportDocumentsRequest): async failure unwraps to
  // InvalidArgumentException.
  @Test
  public void importDocumentsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      ImportDocumentsRequest request =
          ImportDocumentsRequest.newBuilder()
              .setParent(
                  KnowledgeBaseName.ofProjectKnowledgeBaseName("[PROJECT]", "[KNOWLEDGE_BASE]")
                      .toString())
              .setDocumentTemplate(ImportDocumentTemplate.newBuilder().build())
              .setImportGcsCustomMetadata(true)
              .build();
      client.importDocumentsAsync(request).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // deleteDocumentAsync(DocumentName): LRO completes with Empty; only the request
  // name and headers are verified.
  @Test
  public void deleteDocumentTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    DocumentName name =
        DocumentName.ofProjectKnowledgeBaseDocumentName(
            "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");

    client.deleteDocumentAsync(name).get();

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDocumentRequest actualRequest = ((DeleteDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // deleteDocumentAsync(DocumentName): async failure unwraps to InvalidArgumentException.
  @Test
  public void deleteDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      DocumentName name =
          DocumentName.ofProjectKnowledgeBaseDocumentName(
              "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
      client.deleteDocumentAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // deleteDocumentAsync(String): success path via the raw-string name overload.
  @Test
  public void deleteDocumentTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    String name = "name3373707";

    client.deleteDocumentAsync(name).get();

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteDocumentRequest actualRequest = ((DeleteDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // deleteDocumentAsync(String): error propagation for the raw-string name overload.
  @Test
  public void deleteDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      String name = "name3373707";
      client.deleteDocumentAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // updateDocumentAsync(Document, FieldMask): LRO success path; verifies both the
  // document payload and the update mask land on the request.
  @Test
  public void updateDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    Document document = Document.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();

    Document actualResponse = client.updateDocumentAsync(document, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateDocumentRequest actualRequest = ((UpdateDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(document, actualRequest.getDocument());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // updateDocumentAsync(Document, FieldMask): async failure unwraps to
  // InvalidArgumentException.
  @Test
  public void updateDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      Document document = Document.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateDocumentAsync(document, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // reloadDocumentAsync(DocumentName, String): LRO success path; verifies name and
  // contentUri are sent.
  @Test
  public void reloadDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("reloadDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    DocumentName name =
        DocumentName.ofProjectKnowledgeBaseDocumentName(
            "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
    String contentUri = "contentUri264542771";

    Document actualResponse = client.reloadDocumentAsync(name, contentUri).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ReloadDocumentRequest actualRequest = ((ReloadDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertEquals(contentUri, actualRequest.getContentUri());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // reloadDocumentAsync(DocumentName, String): async failure unwraps to
  // InvalidArgumentException.
  @Test
  public void reloadDocumentExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      DocumentName name =
          DocumentName.ofProjectKnowledgeBaseDocumentName(
              "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]");
      String contentUri = "contentUri264542771";
      client.reloadDocumentAsync(name, contentUri).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // reloadDocumentAsync(String, String): success path via the raw-string name overload.
  @Test
  public void reloadDocumentTest2() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("reloadDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    String name = "name3373707";
    String contentUri = "contentUri264542771";

    Document actualResponse = client.reloadDocumentAsync(name, contentUri).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ReloadDocumentRequest actualRequest = ((ReloadDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertEquals(contentUri, actualRequest.getContentUri());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // reloadDocumentAsync(String, String): error propagation for the raw-string overload.
  @Test
  public void reloadDocumentExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockDocuments.addException(exception);

    try {
      String name = "name3373707";
      String contentUri = "contentUri264542771";
      client.reloadDocumentAsync(name, contentUri).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // exportDocumentAsync(ExportDocumentRequest): LRO success path; every request
  // field is compared against what the mock actually received.
  @Test
  public void exportDocumentTest() throws Exception {
    Document expectedResponse =
        Document.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setDisplayName("displayName1714148973")
            .setMimeType("mimeType-1392120434")
            .addAllKnowledgeTypes(new ArrayList<Document.KnowledgeType>())
            .setEnableAutoReload(true)
            .setLatestReloadStatus(Document.ReloadStatus.newBuilder().build())
            .putAllMetadata(new HashMap<String, String>())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("exportDocumentTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockDocuments.addResponse(resultOperation);

    ExportDocumentRequest request =
        ExportDocumentRequest.newBuilder()
            .setName(
                DocumentName.ofProjectKnowledgeBaseDocumentName(
                        "[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
                    .toString())
            .setExportFullContent(true)
            .setSmartMessagingPartialUpdate(true)
            .build();

    Document actualResponse = client.exportDocumentAsync(request).get();
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockDocuments.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ExportDocumentRequest actualRequest = ((ExportDocumentRequest) actualRequests.get(0));

    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getGcsDestination(), actualRequest.getGcsDestination());
    Assert.assertEquals(request.getExportFullContent(), actualRequest.getExportFullContent());
    Assert.assertEquals(
        request.getSmartMessagingPartialUpdate(), actualRequest.getSmartMessagingPartialUpdate());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void exportDocumentExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockDocuments.addException(exception);
try {
ExportDocumentRequest request =
ExportDocumentRequest.newBuilder()
.setName(
DocumentName.ofProjectKnowledgeBaseDocumentName(
"[PROJECT]", "[KNOWLEDGE_BASE]", "[DOCUMENT]")
.toString())
.setExportFullContent(true)
.setSmartMessagingPartialUpdate(true)
.build();
client.exportDocumentAsync(request).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
@Test
public void listLocationsTest() throws Exception {
Location responsesElement = Location.newBuilder().build();
ListLocationsResponse expectedResponse =
ListLocationsResponse.newBuilder()
.setNextPageToken("")
.addAllLocations(Arrays.asList(responsesElement))
.build();
mockLocations.addResponse(expectedResponse);
ListLocationsRequest request =
ListLocationsRequest.newBuilder()
.setName("name3373707")
.setFilter("filter-1274492040")
.setPageSize(883849137)
.setPageToken("pageToken873572522")
.build();
ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
Assert.assertEquals(1, resources.size());
Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
List<AbstractMessage> actualRequests = mockLocations.getRequests();
Assert.assertEquals(1, actualRequests.size());
ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
Assert.assertEquals(request.getName(), actualRequest.getName());
Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void listLocationsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLocations.addException(exception);
try {
ListLocationsRequest request =
ListLocationsRequest.newBuilder()
.setName("name3373707")
.setFilter("filter-1274492040")
.setPageSize(883849137)
.setPageToken("pageToken873572522")
.build();
client.listLocations(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
@Test
public void getLocationTest() throws Exception {
Location expectedResponse =
Location.newBuilder()
.setName("name3373707")
.setLocationId("locationId1541836720")
.setDisplayName("displayName1714148973")
.putAllLabels(new HashMap<String, String>())
.setMetadata(Any.newBuilder().build())
.build();
mockLocations.addResponse(expectedResponse);
GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
Location actualResponse = client.getLocation(request);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockLocations.getRequests();
Assert.assertEquals(1, actualRequests.size());
GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
Assert.assertEquals(request.getName(), actualRequest.getName());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void getLocationExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLocations.addException(exception);
try {
GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
client.getLocation(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
|
apache/pdfbox | 36,871 | xmpbox/src/main/java/org/apache/xmpbox/xml/DomXmpParser.java | /*****************************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
****************************************************************************/
package org.apache.xmpbox.xml;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Deque;
import java.util.StringTokenizer;
import javax.xml.XMLConstants;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.xmpbox.XMPMetadata;
import org.apache.xmpbox.XmpConstants;
import org.apache.xmpbox.schema.XMPSchema;
import org.apache.xmpbox.schema.XmpSchemaException;
import org.apache.xmpbox.type.AbstractField;
import org.apache.xmpbox.type.AbstractSimpleProperty;
import org.apache.xmpbox.type.AbstractStructuredType;
import org.apache.xmpbox.type.ArrayProperty;
import org.apache.xmpbox.type.Attribute;
import org.apache.xmpbox.type.BadFieldValueException;
import org.apache.xmpbox.type.Cardinality;
import org.apache.xmpbox.type.ComplexPropertyContainer;
import org.apache.xmpbox.type.PropertiesDescription;
import org.apache.xmpbox.type.PropertyType;
import org.apache.xmpbox.type.TypeMapping;
import org.apache.xmpbox.type.Types;
import org.apache.xmpbox.xml.XmpParsingException.ErrorType;
import org.w3c.dom.Attr;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
import org.xml.sax.SAXException;
/**
 * Parser that turns a serialized XMP packet (XML framed by {@code <?xpacket?>}
 * processing instructions) into an {@link XMPMetadata} object tree.
 * <p>
 * The DOM parser used internally has DOCTYPE declarations and external entity
 * resolution disabled, so untrusted input cannot trigger XXE attacks.
 * <p>
 * NOTE(review): per-parse state is kept in the {@code nsFinder} field, so a
 * single instance does not look safe for concurrent {@code parse()} calls —
 * confirm callers use one instance per thread.
 */
public class DomXmpParser
{
    // Namespace-aware, XXE-hardened DOM builder reused for every parse.
    private DocumentBuilder dBuilder;
    // Stack of xmlns declarations in scope while walking rdf:Description nodes.
    private NamespaceFinder nsFinder;
    // When true, unknown namespaces/property types abort parsing with an exception.
    private boolean strictParsing = true;
    /**
     * Creates a parser backed by a secured, namespace-aware DocumentBuilder.
     *
     * @throws XmpParsingException if the XML parser cannot be configured
     */
    public DomXmpParser() throws XmpParsingException
    {
        try
        {
            DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
            // Harden against XXE / entity-expansion attacks: forbid DOCTYPE and
            // all external entities and DTD loading.
            dbFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
            dbFactory.setFeature("http://xml.org/sax/features/external-general-entities", false);
            dbFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
            dbFactory.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
            dbFactory.setXIncludeAware(false);
            dbFactory.setExpandEntityReferences(false);
            dbFactory.setIgnoringComments(true);
            dbFactory.setNamespaceAware(true);
            dBuilder = dbFactory.newDocumentBuilder();
            nsFinder = new NamespaceFinder();
        }
        catch (ParserConfigurationException e)
        {
            throw new XmpParsingException(ErrorType.Configuration, "Failed to initialize", e);
        }
    }
    /**
     * @return true if unknown namespaces or property types cause a parse failure
     */
    public boolean isStrictParsing()
    {
        return strictParsing;
    }
    /**
     * Enables or disables strict parsing (see {@link #isStrictParsing()}).
     *
     * @param strictParsing the new strictness flag
     */
    public void setStrictParsing(boolean strictParsing)
    {
        this.strictParsing = strictParsing;
    }
    /**
     * Parses an XMP packet held in a byte array.
     *
     * @param xmp the serialized XMP packet
     * @return the parsed metadata
     * @throws XmpParsingException if the packet is malformed
     */
    public XMPMetadata parse(byte[] xmp) throws XmpParsingException
    {
        ByteArrayInputStream input = new ByteArrayInputStream(xmp);
        return parse(input);
    }
    /**
     * Parses an XMP packet from a stream.
     * <p>
     * Expected framing: the xpacket begin processing instruction, optionally
     * more PIs, exactly one root element, the xpacket end PI, and nothing
     * after it. pdfaExtension descriptions are processed first so the schema
     * mapping is populated before the remaining data descriptions are parsed.
     *
     * @param input stream to read (not closed by this method)
     * @return the parsed metadata
     * @throws XmpParsingException if the packet framing or content is invalid
     */
    public XMPMetadata parse(InputStream input) throws XmpParsingException
    {
        Document document = null;
        try
        {
            // prevents validation messages polluting the console
            dBuilder.setErrorHandler(null);
            document = dBuilder.parse(input);
        }
        catch (SAXException | IOException e)
        {
            throw new XmpParsingException(ErrorType.Undefined, "Failed to parse", e);
        }
        XMPMetadata xmp = null;
        // Start reading
        removeComments(document);
        Node node = document.getFirstChild();
        // expect xpacket processing instruction
        if (!(node instanceof ProcessingInstruction))
        {
            throw new XmpParsingException(ErrorType.XpacketBadStart, "xmp should start with a processing instruction");
        }
        else
        {
            xmp = parseInitialXpacket((ProcessingInstruction) node);
            node = node.getNextSibling();
        }
        // forget other processing instruction
        while (node instanceof ProcessingInstruction)
        {
            node = node.getNextSibling();
        }
        // expect root element
        Element root = null;
        if (!(node instanceof Element))
        {
            throw new XmpParsingException(ErrorType.NoRootElement, "xmp should contain a root element");
        }
        else
        {
            // use this element as root
            root = (Element) node;
            node = node.getNextSibling();
        }
        // expect xpacket end
        if (!(node instanceof ProcessingInstruction))
        {
            throw new XmpParsingException(ErrorType.XpacketBadEnd, "xmp should end with a processing instruction");
        }
        else
        {
            parseEndPacket(xmp, (ProcessingInstruction) node);
            node = node.getNextSibling();
        }
        // should be null
        if (node != null)
        {
            throw new XmpParsingException(ErrorType.XpacketBadEnd,
                    "xmp should end after xpacket end processing instruction");
        }
        // xpacket is OK and the is no more nodes
        // Now, parse the content of root
        Element rdfRdf = findDescriptionsParent(root);
        List<Element> descriptions = DomHelper.getElementChildren(rdfRdf);
        List<Element> dataDescriptions = new ArrayList<>(descriptions.size());
        for (Element description : descriptions)
        {
            Element first = DomHelper.getFirstChildElement(description);
            // pdfaExtension descriptions declare schemas; parse them before the data.
            if (first != null && "pdfaExtension".equals(first.getPrefix()))
            {
                PdfaExtensionHelper.validateNaming(xmp, description);
                parseDescriptionRoot(xmp, description);
            }
            else
            {
                dataDescriptions.add(description);
            }
        }
        // find schema description
        PdfaExtensionHelper.populateSchemaMapping(xmp);
        // parse data description
        for (Element description : dataDescriptions)
        {
            parseDescriptionRoot(xmp, description);
        }
        return xmp;
    }
    /**
     * Parses one top-level rdf:Description: its attributes become simple
     * schema properties, its child elements become full properties.
     *
     * @param xmp the metadata being built
     * @param description the rdf:Description element
     * @throws XmpParsingException if a property or schema cannot be parsed
     */
    private void parseDescriptionRoot(XMPMetadata xmp, Element description) throws XmpParsingException
    {
        nsFinder.push(description);
        TypeMapping tm = xmp.getTypeMapping();
        try
        {
            List<Element> properties = DomHelper.getElementChildren(description);
            // parse attributes as properties
            NamedNodeMap nnm = description.getAttributes();
            for (int i = 0; i < nnm.getLength(); i++)
            {
                Attr attr = (Attr) nnm.item(i);
                if (XmpConstants.DEFAULT_RDF_PREFIX.equals(attr.getPrefix())
                        && XmpConstants.ABOUT_NAME.equals(attr.getLocalName()))
                {
                    // do nothing
                }
                else if (attr.getPrefix() == null && XmpConstants.ABOUT_NAME.equals(attr.getLocalName()))
                {
                    // do nothing
                }
                else if (XMLConstants.XMLNS_ATTRIBUTE.equals(attr.getPrefix()))
                {
                    String namespace = attr.getValue();
                    if (!strictParsing && !tm.isStructuredTypeNamespace(namespace))
                    {
                        // PDFBOX-5128: Add the schema on the fly if it can't be found
                        // PDFBOX-5649: But only if the namespace isn't already known
                        // because this adds a namespace without property descriptions
                        String prefix = attr.getLocalName();
                        XMPSchema schema = xmp.getSchema(namespace);
                        if (schema == null && tm.getSchemaFactory(namespace) == null)
                        {
                            tm.addNewNameSpace(namespace, prefix);
                        }
                    }
                }
                else
                {
                    parseDescriptionRootAttr(xmp, description, attr, tm);
                }
            }
            parseChildrenAsProperties(xmp, properties, tm, description);
        }
        catch (XmpSchemaException e)
        {
            throw new XmpParsingException(ErrorType.Undefined, "Parsing failed", e);
        }
        finally
        {
            nsFinder.pop();
        }
    }
    /**
     * Turns one non-xmlns, non-rdf:about attribute of a description into a
     * simple property of the matching schema (creating the schema lazily).
     * Attributes whose namespace has no schema and no factory are ignored.
     *
     * @throws XmpSchemaException if the schema cannot be instantiated
     * @throws XmpParsingException if the attribute value does not fit the declared type
     */
    private void parseDescriptionRootAttr(XMPMetadata xmp, Element description, Attr attr, TypeMapping tm)
            throws XmpSchemaException, XmpParsingException
    {
        String namespace = attr.getNamespaceURI();
        XMPSchema schema = xmp.getSchema(namespace);
        if (schema == null && tm.getSchemaFactory(namespace) != null)
        {
            schema = tm.getSchemaFactory(namespace).createXMPSchema(xmp, attr.getPrefix());
            loadAttributes(schema, description);
        }
        // Only process when a schema was successfully found
        if( schema != null )
        {
            ComplexPropertyContainer container = schema.getContainer();
            PropertyType type = checkPropertyDefinition(xmp,
                    new QName(attr.getNamespaceURI(), attr.getLocalName()));
            //Default to text if no type is found
            if( type == null)
            {
                type = TypeMapping.createPropertyType(Types.Text, Cardinality.Simple);
            }
            try
            {
                AbstractSimpleProperty sp = tm.instanciateSimpleProperty(namespace, schema.getPrefix(),
                        attr.getLocalName(), attr.getValue(), type.type());
                container.addProperty(sp);
            }
            catch (IllegalArgumentException e)
            {
                throw new XmpParsingException(ErrorType.Format,
                        e.getMessage() + " in " + schema.getPrefix() + ":" + attr.getLocalName(), e);
            }
        }
    }
    /**
     * Parses the child elements of a description as properties of their
     * respective schemas (created on demand from the type mapping).
     *
     * @throws XmpParsingException if a namespace has no schema definition
     * @throws XmpSchemaException if a schema cannot be instantiated
     */
    private void parseChildrenAsProperties(XMPMetadata xmp, List<Element> properties, TypeMapping tm, Element description)
            throws XmpParsingException, XmpSchemaException
    {
        // parse children elements as properties
        for (Element property : properties)
        {
            nsFinder.push(property);
            String namespace = property.getNamespaceURI();
            PropertyType type = checkPropertyDefinition(xmp, DomHelper.getQName(property));
            // create the container
            if (!tm.isDefinedSchema(namespace))
            {
                throw new XmpParsingException(ErrorType.NoSchema,
                        "This namespace is not a schema or a structured type : " + namespace);
            }
            XMPSchema schema = xmp.getSchema(namespace);
            if (schema == null)
            {
                schema = tm.getSchemaFactory(namespace).createXMPSchema(xmp, property.getPrefix());
                loadAttributes(schema, description);
            }
            ComplexPropertyContainer container = schema.getContainer();
            // create property
            createProperty(xmp, property, type, container);
            nsFinder.pop();
        }
    }
    /**
     * Dispatches one property element to the handler matching its declared
     * type: LangAlt, array, simple, structured or defined type. With strict
     * parsing off, an unknown type falls back to plain text.
     *
     * @throws XmpParsingException if the type is unknown (strict mode) or the value is malformed
     */
    private void createProperty(XMPMetadata xmp, Element property, PropertyType type, ComplexPropertyContainer container)
            throws XmpParsingException
    {
        String prefix = property.getPrefix();
        String name = property.getLocalName();
        String namespace = property.getNamespaceURI();
        // create property
        nsFinder.push(property);
        try
        {
            if (type == null)
            {
                if (strictParsing)
                {
                    throw new XmpParsingException(ErrorType.InvalidType, "No type defined for {" + namespace + "}"
                            + name);
                }
                else
                {
                    // use it as string
                    manageSimpleType(xmp, property, Types.Text, container);
                }
            }
            else if (type.type() == Types.LangAlt)
            {
                manageLangAlt(xmp, property, container);
            }
            else if (type.card().isArray())
            {
                manageArray(xmp, property, type, container);
            }
            else if (type.type().isSimple())
            {
                manageSimpleType(xmp, property, type.type(), container);
            }
            else if (type.type().isStructured())
            {
                manageStructuredType(xmp, property, prefix, container);
            }
            else if (type.type() == Types.DefinedType)
            {
                manageDefinedType(xmp, property, prefix, container);
            }
        }
        catch (IllegalArgumentException e)
        {
            throw new XmpParsingException(ErrorType.Format, e.getMessage() + " in " + prefix + ":" + name, e);
        }
        finally
        {
            nsFinder.pop();
        }
    }
    /**
     * Parses a property whose type was defined via a PDF/A extension schema,
     * either inlined with rdf:parseType="Resource" or wrapped in an inner
     * rdf:Description element. Fails if no child elements are present.
     */
    private void manageDefinedType(XMPMetadata xmp, Element property, String prefix, ComplexPropertyContainer container)
            throws XmpParsingException
    {
        if (DomHelper.isParseTypeResource(property))
        {
            AbstractStructuredType ast = parseLiDescription(xmp, DomHelper.getQName(property), property);
            if (ast == null)
            {
                throw new XmpParsingException(ErrorType.Format, "property should contain child elements : "
                        + property);
            }
            ast.setPrefix(prefix);
            container.addProperty(ast);
        }
        else
        {
            Element inner = DomHelper.getFirstChildElement(property);
            if (inner == null)
            {
                throw new XmpParsingException(ErrorType.Format, "property should contain child element : "
                        + property);
            }
            AbstractStructuredType ast = parseLiDescription(xmp, DomHelper.getQName(property), inner);
            if (ast == null)
            {
                throw new XmpParsingException(ErrorType.Format, "inner element should contain child elements : "
                        + inner);
            }
            ast.setPrefix(prefix);
            container.addProperty(ast);
        }
    }
    /**
     * Parses a property of a built-in structured type. Unlike
     * {@link #manageDefinedType}, an empty parseType="Resource" property is
     * silently skipped instead of raising an error.
     */
    private void manageStructuredType(XMPMetadata xmp, Element property, String prefix, ComplexPropertyContainer container)
            throws XmpParsingException
    {
        if (DomHelper.isParseTypeResource(property))
        {
            AbstractStructuredType ast = parseLiDescription(xmp, DomHelper.getQName(property), property);
            if (ast != null)
            {
                ast.setPrefix(prefix);
                container.addProperty(ast);
            }
        }
        else
        {
            Element inner = DomHelper.getFirstChildElement(property);
            if (inner != null)
            {
                // NOTE(review): push without a matching pop() — this frame stays on
                // the namespace stack after the method returns; confirm intended.
                nsFinder.push(inner);
                AbstractStructuredType ast = parseLiDescription(xmp, DomHelper.getQName(property), inner);
                if (ast == null)
                {
                    throw new XmpParsingException(ErrorType.Format, "inner element should contain child elements : "
                            + inner);
                }
                ast.setPrefix(prefix);
                container.addProperty(ast);
            }
        }
    }
    /**
     * Creates a simple (scalar) property from the element's text content and
     * copies the element's attributes onto it.
     */
    private void manageSimpleType(XMPMetadata xmp, Element property, Types type, ComplexPropertyContainer container)
    {
        TypeMapping tm = xmp.getTypeMapping();
        String prefix = property.getPrefix();
        String name = property.getLocalName();
        String namespace = property.getNamespaceURI();
        AbstractSimpleProperty sp = tm.instanciateSimpleProperty(namespace, prefix, name, property.getTextContent(),
                type);
        loadAttributes(sp, property);
        container.addProperty(sp);
    }
    /**
     * Parses an array property: expects a single rdf:Bag/Seq/Alt child whose
     * local name matches the declared cardinality, then parses each rdf:li.
     *
     * @throws XmpParsingException if the array wrapper is missing or of the wrong kind
     */
    private void manageArray(XMPMetadata xmp, Element property, PropertyType type, ComplexPropertyContainer container)
            throws XmpParsingException
    {
        TypeMapping tm = xmp.getTypeMapping();
        String prefix = property.getPrefix();
        String name = property.getLocalName();
        String namespace = property.getNamespaceURI();
        Element bagOrSeq = DomHelper.getUniqueElementChild(property);
        // ensure this is the good type of array
        if (bagOrSeq == null)
        {
            // not an array
            String whatFound = "nothing";
            if (property.getFirstChild() != null)
            {
                whatFound = property.getFirstChild().getClass().getName();
            }
            throw new XmpParsingException(ErrorType.Format, "Invalid array definition, expecting " + type.card()
                    + " and found "
                    + whatFound
                    + " [prefix=" + prefix + "; name=" + name + "]");
        }
        if (!bagOrSeq.getLocalName().equals(type.card().name()))
        {
            // not the good array type
            throw new XmpParsingException(ErrorType.Format, "Invalid array type, expecting " + type.card()
                    + " and found " + bagOrSeq.getLocalName() + " [prefix="+prefix+"; name="+name+"]");
        }
        ArrayProperty array = tm.createArrayProperty(namespace, prefix, name, type.card());
        container.addProperty(array);
        List<Element> lis = DomHelper.getElementChildren(bagOrSeq);
        for (Element element : lis)
        {
            QName propertyQName = new QName(element.getLocalName());
            AbstractField ast = parseLiElement(xmp, propertyQName, element, type.type());
            if (ast != null)
            {
                array.addProperty(ast);
            }
        }
    }
    /**
     * Parses a language-alternative property as an rdf:Alt array of LangAlt values.
     */
    private void manageLangAlt(XMPMetadata xmp, Element property, ComplexPropertyContainer container)
            throws XmpParsingException
    {
        manageArray(xmp, property, TypeMapping.createPropertyType(Types.LangAlt, Cardinality.Alt), container);
    }
    /**
     * Parses the children of an inner rdf:Description into the container of a
     * structured property, resolving each child's type from its parent
     * structured-type mapping.
     */
    private void parseDescriptionInner(XMPMetadata xmp, Element description, ComplexPropertyContainer parentContainer)
            throws XmpParsingException
    {
        nsFinder.push(description);
        TypeMapping tm = xmp.getTypeMapping();
        try
        {
            List<Element> properties = DomHelper.getElementChildren(description);
            for (Element property : properties)
            {
                String name = property.getLocalName();
                PropertyType dtype = checkPropertyDefinition(xmp, DomHelper.getQName(property));
                // NOTE(review): assumes dtype is non-null here; a null would NPE below.
                PropertyType ptype = tm.getStructuredPropMapping(dtype.type()).getPropertyType(name);
                // create property
                createProperty(xmp, property, ptype, parentContainer);
            }
        }
        finally
        {
            nsFinder.pop();
        }
    }
    /**
     * Parses one rdf:li array element into a field: a nested description for
     * structured items, or a simple property built from the text content.
     * Returns null only when the nested description turns out to be empty.
     */
    private AbstractField parseLiElement(XMPMetadata xmp, QName descriptor, Element liElement, Types type)
            throws XmpParsingException
    {
        if (DomHelper.isParseTypeResource(liElement))
        {
            return parseLiDescription(xmp, descriptor, liElement);
        }
        // will find rdf:Description
        Element liChild = DomHelper.getUniqueElementChild(liElement);
        if (liChild != null)
        {
            // NOTE(review): push without a matching pop() before returning —
            // this frame stays on the namespace stack; confirm intended.
            nsFinder.push(liChild);
            return parseLiDescription(xmp, descriptor, liChild);
        }
        else
        {
            // no child
            String text = liElement.getTextContent();
            TypeMapping tm = xmp.getTypeMapping();
            if (type.isSimple())
            {
                AbstractField af = tm.instanciateSimpleProperty(descriptor.getNamespaceURI(),
                        descriptor.getPrefix(), descriptor.getLocalPart(), text, type);
                loadAttributes(af, liElement);
                return af;
            }
            else
            {
                // PDFBOX-4325: assume it is structured
                AbstractField af;
                try
                {
                    af = tm.instanciateStructuredType(type, descriptor.getLocalPart());
                }
                catch (BadFieldValueException ex)
                {
                    throw new XmpParsingException(ErrorType.InvalidType, "Parsing of structured type failed", ex);
                }
                loadAttributes(af, liElement);
                return af;
            }
        }
    }
    /**
     * Copies the XML attributes of an element onto a field, skipping xmlns
     * declarations and handling rdf:about specially for schemas.
     * <p>
     * NOTE(review): every copied attribute is stored under XML_NS_URI
     * regardless of its declared namespace — confirm this is intended.
     */
    private void loadAttributes(AbstractField sp, Element element)
    {
        NamedNodeMap nnm = element.getAttributes();
        for (int i = 0; i < nnm.getLength(); i++)
        {
            Attr attr = (Attr) nnm.item(i);
            if (XMLConstants.XMLNS_ATTRIBUTE.equals(attr.getPrefix()))
            {
                // do nothing
            }
            else if (XmpConstants.DEFAULT_RDF_PREFIX.equals(attr.getPrefix())
                    && XmpConstants.ABOUT_NAME.equals(attr.getLocalName()))
            {
                // set about
                if (sp instanceof XMPSchema)
                {
                    ((XMPSchema) sp).setAboutAsSimple(attr.getValue());
                }
            }
            else
            {
                Attribute attribute = new Attribute(XMLConstants.XML_NS_URI, attr.getLocalName(), attr.getValue());
                sp.setAttribute(attribute);
            }
        }
    }
    /**
     * Parses the children of a description-like element into a structured
     * type. The concrete type is inferred from the first child; each child is
     * then parsed as an array, a simple value, or a nested structure.
     *
     * @return the populated structured type, or null if the element has no children
     * @throws XmpParsingException if a child's type cannot be resolved
     */
    private AbstractStructuredType parseLiDescription(XMPMetadata xmp, QName descriptor, Element liElement)
            throws XmpParsingException
    {
        TypeMapping tm = xmp.getTypeMapping();
        List<Element> elements = DomHelper.getElementChildren(liElement);
        if (elements.isEmpty())
        {
            // The list is empty
            return null;
        }
        // Instantiate abstract structured type with hint from first element
        Element first = elements.get(0);
        nsFinder.push(first);
        PropertyType ctype = checkPropertyDefinition(xmp, DomHelper.getQName(first));
        if (ctype == null)
        {
            throw new XmpParsingException(ErrorType.NoType, "ctype is null, first: " + first +
                    ", DomHelper.getQName(first): " + DomHelper.getQName(first));
        }
        Types tt = ctype.type();
        AbstractStructuredType ast = instanciateStructured(tm, tt, descriptor.getLocalPart(), first.getNamespaceURI());
        ast.setNamespace(first.getNamespaceURI());
        ast.setPrefix(first.getPrefix());
        PropertiesDescription pm;
        if (tt.isStructured())
        {
            pm = tm.getStructuredPropMapping(tt);
        }
        else
        {
            pm = tm.getDefinedDescriptionByNamespace(first.getNamespaceURI());
        }
        for (Element element : elements)
        {
            String prefix = element.getPrefix();
            String name = element.getLocalName();
            String namespace = element.getNamespaceURI();
            PropertyType type = pm.getPropertyType(name);
            if (type == null)
            {
                // not defined
                throw new XmpParsingException(ErrorType.NoType, "Type '" + name + "' not defined in "
                        + element.getNamespaceURI());
            }
            else if (type.card().isArray())
            {
                ArrayProperty array = tm.createArrayProperty(namespace, prefix, name, type.card());
                ast.getContainer().addProperty(array);
                Element bagOrSeq = DomHelper.getUniqueElementChild(element);
                List<Element> lis = DomHelper.getElementChildren(bagOrSeq);
                for (Element element2 : lis)
                {
                    AbstractField ast2 = parseLiElement(xmp, descriptor, element2, type.type());
                    if (ast2 != null)
                    {
                        array.addProperty(ast2);
                    }
                }
            }
            else if (type.type().isSimple())
            {
                AbstractSimpleProperty sp = tm.instanciateSimpleProperty(namespace, prefix, name,
                        element.getTextContent(), type.type());
                loadAttributes(sp, element);
                ast.getContainer().addProperty(sp);
            }
            else if (type.type().isStructured())
            {
                // create a new structured type
                AbstractStructuredType inner = instanciateStructured(tm, type.type(), name, null);
                inner.setNamespace(namespace);
                inner.setPrefix(prefix);
                ast.getContainer().addProperty(inner);
                ComplexPropertyContainer cpc = inner.getContainer();
                if (DomHelper.isParseTypeResource(element))
                {
                    parseDescriptionInner(xmp, element, cpc);
                }
                else
                {
                    Element descElement = DomHelper.getFirstChildElement(element);
                    if (descElement != null)
                    {
                        parseDescriptionInner(xmp, descElement, cpc);
                    }
                }
            }
            else
            {
                throw new XmpParsingException(ErrorType.NoType, "Unidentified element to parse " + element + " (type="
                        + type + ")");
            }
        }
        // NOTE(review): this pop() is skipped when an exception is thrown above,
        // leaving the namespace stack unbalanced — confirm acceptable.
        nsFinder.pop();
        return ast;
    }
    /**
     * Parses the opening {@code <?xpacket ...?>} processing instruction and
     * builds the metadata object from its quoted id/begin/bytes/encoding
     * attributes. Unknown attributes are rejected.
     *
     * @throws XmpParsingException if the PI name or its data cannot be parsed
     */
    private XMPMetadata parseInitialXpacket(ProcessingInstruction pi) throws XmpParsingException
    {
        if (!"xpacket".equals(pi.getNodeName()))
        {
            throw new XmpParsingException(ErrorType.XpacketBadStart, "Bad processing instruction name : "
                    + pi.getNodeName());
        }
        String data = pi.getData();
        StringTokenizer tokens = new StringTokenizer(data, " ");
        String id = null;
        String begin = null;
        String bytes = null;
        String encoding = null;
        while (tokens.hasMoreTokens())
        {
            String token = tokens.nextToken();
            // each token must look like name="value" or name='value'
            if (!token.endsWith("\"") && !token.endsWith("'"))
            {
                throw new XmpParsingException(ErrorType.XpacketBadStart, "Cannot understand PI data part : '" + token
                        + "' in '" + data + "'");
            }
            String quote = token.substring(token.length() - 1);
            int pos = token.indexOf("=" + quote);
            if (pos <= 0)
            {
                throw new XmpParsingException(ErrorType.XpacketBadStart, "Cannot understand PI data part : '" + token
                        + "' in '" + data + "'");
            }
            String name = token.substring(0, pos);
            if (token.length() - 1 < pos + 2)
            {
                throw new XmpParsingException(ErrorType.XpacketBadStart, "Cannot understand PI data part : '" + token
                        + "' in '" + data + "'");
            }
            String value = token.substring(pos + 2, token.length() - 1);
            switch (name)
            {
                case "id":
                    id = value;
                    break;
                case "begin":
                    begin = value;
                    break;
                case "bytes":
                    bytes = value;
                    break;
                case "encoding":
                    encoding = value;
                    break;
                default:
                    throw new XmpParsingException(ErrorType.XpacketBadStart,
                            "Unknown attribute in xpacket PI : '" + token + "'");
            }
        }
        return XMPMetadata.createXMPMetadata(begin, id, bytes, encoding);
    }
    /**
     * Parses the closing {@code <?xpacket end='r|w'?>} processing instruction
     * and stores the end marker on the metadata.
     *
     * @throws XmpParsingException if the end attribute is missing or invalid
     */
    private void parseEndPacket(XMPMetadata metadata, ProcessingInstruction pi) throws XmpParsingException
    {
        String xpackData = pi.getData();
        // end attribute must be present and placed in first
        // xmp spec says Other unrecognized attributes can follow, but
        // should be ignored
        if (xpackData.startsWith("end="))
        {
            char end = xpackData.charAt(5);
            // check value (5 for end='X')
            if (end != 'r' && end != 'w')
            {
                throw new XmpParsingException(ErrorType.XpacketBadEnd,
                        "Excepted xpacket 'end' attribute with value 'r' or 'w' ");
            }
            else
            {
                metadata.setEndXPacket(Character.toString(end));
            }
        }
        else
        {
            // should find end='r/w'
            throw new XmpParsingException(ErrorType.XpacketBadEnd,
                    "Excepted xpacket 'end' attribute (must be present and placed in first)");
        }
    }
    /**
     * Returns the rdf:RDF element holding the descriptions: either the root
     * itself, or the sole child of an x:xmpmeta wrapper.
     *
     * @throws XmpParsingException if the wrapper or rdf:RDF element is malformed
     */
    private Element findDescriptionsParent(Element root) throws XmpParsingException
    {
        Element rdfRdf;
        // check if already rdf element, as xmpmeta wrapper can be optional
        if (!XmpConstants.RDF_NAMESPACE.equals(root.getNamespaceURI()))
        {
            // always <x:xmpmeta xmlns:x="adobe:ns:meta/">
            expectNaming(root, "adobe:ns:meta/", "x", "xmpmeta");
            // should only have one child
            NodeList nl = root.getChildNodes();
            if (nl.getLength() == 0)
            {
                // empty description
                throw new XmpParsingException(ErrorType.Format, "No rdf description found in xmp");
            }
            else if (nl.getLength() > 1)
            {
                // only expect one element
                throw new XmpParsingException(ErrorType.Format, "More than one element found in x:xmpmeta");
            }
            else if (!(root.getFirstChild() instanceof Element))
            {
                // should be an element
                throw new XmpParsingException(ErrorType.Format, "x:xmpmeta does not contains rdf:RDF element");
            } // else let's parse
            rdfRdf = (Element) root.getFirstChild();
        }
        else
        {
            rdfRdf = root;
        }
        // always <rdf:RDF
        // xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
        expectNaming(rdfRdf, XmpConstants.RDF_NAMESPACE, XmpConstants.DEFAULT_RDF_PREFIX,
                XmpConstants.DEFAULT_RDF_LOCAL_NAME);
        // return description parent
        return rdfRdf;
    }
    /**
     * Asserts that an element has the expected namespace, prefix and local
     * name; a null expected value skips that check.
     *
     * @throws XmpParsingException on the first mismatch
     */
    private void expectNaming(Element element, String ns, String prefix, String ln) throws XmpParsingException
    {
        if ((ns != null) && !(ns.equals(element.getNamespaceURI())))
        {
            throw new XmpParsingException(ErrorType.Format, "Expecting namespace '" + ns + "' and found '"
                    + element.getNamespaceURI() + "'");
        }
        else if ((prefix != null) && !(prefix.equals(element.getPrefix())))
        {
            throw new XmpParsingException(ErrorType.Format, "Expecting prefix '" + prefix + "' and found '"
                    + element.getPrefix() + "'");
        }
        else if ((ln != null) && !(ln.equals(element.getLocalName())))
        {
            throw new XmpParsingException(ErrorType.Format, "Expecting local name '" + ln + "' and found '"
                    + element.getLocalName() + "'");
        } // else OK
    }
    /**
     * Remove all the comments node in the parent element of the parameter
     *
     * @param root
     *            the first node of an element or document to clear
     */
    private void removeComments(Node root)
    {
        // will hold the nodes which are to be deleted
        List<Node> forDeletion = new ArrayList<>();
        NodeList nl = root.getChildNodes();
        if (nl.getLength()<=1)
        {
            // There is only one node so we do not remove it
            return;
        }
        for (int i = 0; i < nl.getLength(); i++)
        {
            Node node = nl.item(i);
            if (node instanceof Comment)
            {
                // comments to be deleted
                forDeletion.add(node);
            }
            else if (node instanceof Text)
            {
                if (node.getTextContent().isBlank())
                {
                    // TODO: verify why this is necessary
                    // empty text nodes to be deleted
                    forDeletion.add(node);
                }
            }
            else if (node instanceof Element)
            {
                // clean child
                removeComments(node);
            } // else do nothing
        }
        // now remove the child nodes
        forDeletion.forEach(root::removeChild);
    }
    /**
     * Instantiates a structured or defined type via the type mapping.
     *
     * @param tm the type mapping to use
     * @param type the target type (must be structured or defined)
     * @param name the property name
     * @param structuredNamespace namespace used for defined types (may be null for structured)
     * @throws XmpParsingException if the type is neither structured nor defined, or instantiation fails
     */
    private AbstractStructuredType instanciateStructured(TypeMapping tm, Types type, String name,
            String structuredNamespace) throws XmpParsingException
    {
        try
        {
            if (type.isStructured())
            {
                return tm.instanciateStructuredType(type, name);
            }
            else if (type.isDefined())
            {
                return tm.instanciateDefinedType(name, structuredNamespace);
            }
            else
            {
                throw new XmpParsingException(ErrorType.InvalidType, "Type not structured : " + type);
            }
        }
        catch (BadFieldValueException e)
        {
            throw new XmpParsingException(ErrorType.InvalidType, "Parsing failed", e);
        }
    }
    /**
     * Looks up the declared type of a property, first checking that its
     * namespace is both declared in the document (namespace stack) and known
     * to the type mapping.
     *
     * @return the property type reported by the type mapping
     * @throws XmpParsingException if the namespace is undeclared/unknown or lookup fails
     */
    private PropertyType checkPropertyDefinition(XMPMetadata xmp, QName prop) throws XmpParsingException
    {
        TypeMapping tm = xmp.getTypeMapping();
        // test if namespace is set in xml
        if (!nsFinder.containsNamespace(prop.getNamespaceURI()))
        {
            throw new XmpParsingException(ErrorType.NoSchema, "Schema is not set in this document : "
                    + prop.getNamespaceURI());
        }
        // test if namespace is defined
        String nsuri = prop.getNamespaceURI();
        if (!tm.isDefinedNamespace(nsuri))
        {
            throw new XmpParsingException(ErrorType.NoSchema, "Cannot find a definition for the namespace "
                    + prop.getNamespaceURI());
        }
        try
        {
            return tm.getSpecifiedPropertyType(prop);
        }
        catch (BadFieldValueException e)
        {
            throw new XmpParsingException(ErrorType.InvalidType, "Failed to retrieve property definition", e);
        }
    }
    /**
     * Stack of prefix-to-namespace maps mirroring the xmlns declarations in
     * scope while descending the rdf tree. Used to verify that a property's
     * namespace was actually declared somewhere above it.
     */
    protected static class NamespaceFinder
    {
        // Innermost scope is at the top of the deque.
        private final Deque<Map<String, String>> stack = new ArrayDeque<>();
        /**
         * Pushes a new scope containing the element's namespace declarations.
         */
        protected void push(Element description)
        {
            NamedNodeMap nnm = description.getAttributes();
            Map<String, String> map = new HashMap<>(nnm.getLength());
            for (int j = 0; j < nnm.getLength(); j++)
            {
                Attr no = (Attr) nnm.item(j);
                // if ns definition add it
                if (XMLConstants.XMLNS_ATTRIBUTE_NS_URI.equals(no.getNamespaceURI()))
                {
                    map.put(no.getLocalName(), no.getValue());
                }
                else if (no.getNamespaceURI() != null && no.getPrefix() != null)
                {
                    // PDFBOX-5976
                    map.put(no.getPrefix(), no.getNamespaceURI());
                }
            }
            stack.push(map);
        }
        /**
         * Pops and returns the innermost scope.
         */
        protected Map<String, String> pop()
        {
            return stack.pop();
        }
        /**
         * @return true if any scope on the stack declares the given namespace URI
         */
        protected boolean containsNamespace(String namespace)
        {
            return stack.stream().anyMatch(map -> map.containsValue(namespace));
        }
    }
}
|
googleapis/google-cloud-java | 36,772 | java-oracledatabase/proto-google-cloud-oracledatabase-v1/src/main/java/com/google/cloud/oracledatabase/v1/ListGiVersionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/oracledatabase/v1/oracledatabase.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.oracledatabase.v1;
/**
*
*
* <pre>
* The response for `GiVersion.List`.
* </pre>
*
* Protobuf type {@code google.cloud.oracledatabase.v1.ListGiVersionsResponse}
*/
public final class ListGiVersionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.oracledatabase.v1.ListGiVersionsResponse)
ListGiVersionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListGiVersionsResponse.newBuilder() to construct.
private ListGiVersionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListGiVersionsResponse() {
giVersions_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListGiVersionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListGiVersionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListGiVersionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse.class,
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse.Builder.class);
}
public static final int GI_VERSIONS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.oracledatabase.v1.GiVersion> giVersions_;
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.oracledatabase.v1.GiVersion> getGiVersionsList() {
return giVersions_;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.oracledatabase.v1.GiVersionOrBuilder>
getGiVersionsOrBuilderList() {
return giVersions_;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
@java.lang.Override
public int getGiVersionsCount() {
return giVersions_.size();
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.oracledatabase.v1.GiVersion getGiVersions(int index) {
return giVersions_.get(index);
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
@java.lang.Override
public com.google.cloud.oracledatabase.v1.GiVersionOrBuilder getGiVersionsOrBuilder(int index) {
return giVersions_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < giVersions_.size(); i++) {
output.writeMessage(1, giVersions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < giVersions_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, giVersions_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.oracledatabase.v1.ListGiVersionsResponse)) {
return super.equals(obj);
}
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse other =
(com.google.cloud.oracledatabase.v1.ListGiVersionsResponse) obj;
if (!getGiVersionsList().equals(other.getGiVersionsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getGiVersionsCount() > 0) {
hash = (37 * hash) + GI_VERSIONS_FIELD_NUMBER;
hash = (53 * hash) + getGiVersionsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response for `GiVersion.List`.
* </pre>
*
* Protobuf type {@code google.cloud.oracledatabase.v1.ListGiVersionsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.oracledatabase.v1.ListGiVersionsResponse)
com.google.cloud.oracledatabase.v1.ListGiVersionsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListGiVersionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListGiVersionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse.class,
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse.Builder.class);
}
// Construct using com.google.cloud.oracledatabase.v1.ListGiVersionsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (giVersionsBuilder_ == null) {
giVersions_ = java.util.Collections.emptyList();
} else {
giVersions_ = null;
giVersionsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.oracledatabase.v1.V1mainProto
.internal_static_google_cloud_oracledatabase_v1_ListGiVersionsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListGiVersionsResponse getDefaultInstanceForType() {
return com.google.cloud.oracledatabase.v1.ListGiVersionsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListGiVersionsResponse build() {
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListGiVersionsResponse buildPartial() {
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse result =
new com.google.cloud.oracledatabase.v1.ListGiVersionsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.oracledatabase.v1.ListGiVersionsResponse result) {
if (giVersionsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
giVersions_ = java.util.Collections.unmodifiableList(giVersions_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.giVersions_ = giVersions_;
} else {
result.giVersions_ = giVersionsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.oracledatabase.v1.ListGiVersionsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.oracledatabase.v1.ListGiVersionsResponse) {
return mergeFrom((com.google.cloud.oracledatabase.v1.ListGiVersionsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.oracledatabase.v1.ListGiVersionsResponse other) {
if (other == com.google.cloud.oracledatabase.v1.ListGiVersionsResponse.getDefaultInstance())
return this;
if (giVersionsBuilder_ == null) {
if (!other.giVersions_.isEmpty()) {
if (giVersions_.isEmpty()) {
giVersions_ = other.giVersions_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureGiVersionsIsMutable();
giVersions_.addAll(other.giVersions_);
}
onChanged();
}
} else {
if (!other.giVersions_.isEmpty()) {
if (giVersionsBuilder_.isEmpty()) {
giVersionsBuilder_.dispose();
giVersionsBuilder_ = null;
giVersions_ = other.giVersions_;
bitField0_ = (bitField0_ & ~0x00000001);
giVersionsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getGiVersionsFieldBuilder()
: null;
} else {
giVersionsBuilder_.addAllMessages(other.giVersions_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.oracledatabase.v1.GiVersion m =
input.readMessage(
com.google.cloud.oracledatabase.v1.GiVersion.parser(), extensionRegistry);
if (giVersionsBuilder_ == null) {
ensureGiVersionsIsMutable();
giVersions_.add(m);
} else {
giVersionsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.oracledatabase.v1.GiVersion> giVersions_ =
java.util.Collections.emptyList();
private void ensureGiVersionsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
giVersions_ =
new java.util.ArrayList<com.google.cloud.oracledatabase.v1.GiVersion>(giVersions_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.GiVersion,
com.google.cloud.oracledatabase.v1.GiVersion.Builder,
com.google.cloud.oracledatabase.v1.GiVersionOrBuilder>
giVersionsBuilder_;
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public java.util.List<com.google.cloud.oracledatabase.v1.GiVersion> getGiVersionsList() {
if (giVersionsBuilder_ == null) {
return java.util.Collections.unmodifiableList(giVersions_);
} else {
return giVersionsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public int getGiVersionsCount() {
if (giVersionsBuilder_ == null) {
return giVersions_.size();
} else {
return giVersionsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.GiVersion getGiVersions(int index) {
if (giVersionsBuilder_ == null) {
return giVersions_.get(index);
} else {
return giVersionsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder setGiVersions(int index, com.google.cloud.oracledatabase.v1.GiVersion value) {
if (giVersionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGiVersionsIsMutable();
giVersions_.set(index, value);
onChanged();
} else {
giVersionsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder setGiVersions(
int index, com.google.cloud.oracledatabase.v1.GiVersion.Builder builderForValue) {
if (giVersionsBuilder_ == null) {
ensureGiVersionsIsMutable();
giVersions_.set(index, builderForValue.build());
onChanged();
} else {
giVersionsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder addGiVersions(com.google.cloud.oracledatabase.v1.GiVersion value) {
if (giVersionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGiVersionsIsMutable();
giVersions_.add(value);
onChanged();
} else {
giVersionsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder addGiVersions(int index, com.google.cloud.oracledatabase.v1.GiVersion value) {
if (giVersionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGiVersionsIsMutable();
giVersions_.add(index, value);
onChanged();
} else {
giVersionsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder addGiVersions(
com.google.cloud.oracledatabase.v1.GiVersion.Builder builderForValue) {
if (giVersionsBuilder_ == null) {
ensureGiVersionsIsMutable();
giVersions_.add(builderForValue.build());
onChanged();
} else {
giVersionsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder addGiVersions(
int index, com.google.cloud.oracledatabase.v1.GiVersion.Builder builderForValue) {
if (giVersionsBuilder_ == null) {
ensureGiVersionsIsMutable();
giVersions_.add(index, builderForValue.build());
onChanged();
} else {
giVersionsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder addAllGiVersions(
java.lang.Iterable<? extends com.google.cloud.oracledatabase.v1.GiVersion> values) {
if (giVersionsBuilder_ == null) {
ensureGiVersionsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, giVersions_);
onChanged();
} else {
giVersionsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder clearGiVersions() {
if (giVersionsBuilder_ == null) {
giVersions_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
giVersionsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public Builder removeGiVersions(int index) {
if (giVersionsBuilder_ == null) {
ensureGiVersionsIsMutable();
giVersions_.remove(index);
onChanged();
} else {
giVersionsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.GiVersion.Builder getGiVersionsBuilder(int index) {
return getGiVersionsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.GiVersionOrBuilder getGiVersionsOrBuilder(int index) {
if (giVersionsBuilder_ == null) {
return giVersions_.get(index);
} else {
return giVersionsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public java.util.List<? extends com.google.cloud.oracledatabase.v1.GiVersionOrBuilder>
getGiVersionsOrBuilderList() {
if (giVersionsBuilder_ != null) {
return giVersionsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(giVersions_);
}
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.GiVersion.Builder addGiVersionsBuilder() {
return getGiVersionsFieldBuilder()
.addBuilder(com.google.cloud.oracledatabase.v1.GiVersion.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public com.google.cloud.oracledatabase.v1.GiVersion.Builder addGiVersionsBuilder(int index) {
return getGiVersionsFieldBuilder()
.addBuilder(index, com.google.cloud.oracledatabase.v1.GiVersion.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of Oracle Grid Infrastructure (GI) versions.
* </pre>
*
* <code>repeated .google.cloud.oracledatabase.v1.GiVersion gi_versions = 1;</code>
*/
public java.util.List<com.google.cloud.oracledatabase.v1.GiVersion.Builder>
getGiVersionsBuilderList() {
return getGiVersionsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.GiVersion,
com.google.cloud.oracledatabase.v1.GiVersion.Builder,
com.google.cloud.oracledatabase.v1.GiVersionOrBuilder>
getGiVersionsFieldBuilder() {
if (giVersionsBuilder_ == null) {
giVersionsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.oracledatabase.v1.GiVersion,
com.google.cloud.oracledatabase.v1.GiVersion.Builder,
com.google.cloud.oracledatabase.v1.GiVersionOrBuilder>(
giVersions_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
giVersions_ = null;
}
return giVersionsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.oracledatabase.v1.ListGiVersionsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.oracledatabase.v1.ListGiVersionsResponse)
private static final com.google.cloud.oracledatabase.v1.ListGiVersionsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.oracledatabase.v1.ListGiVersionsResponse();
}
public static com.google.cloud.oracledatabase.v1.ListGiVersionsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListGiVersionsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListGiVersionsResponse>() {
@java.lang.Override
public ListGiVersionsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListGiVersionsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListGiVersionsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.oracledatabase.v1.ListGiVersionsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 37,013 | compiler/src/jdk.graal.compiler/src/jdk/graal/compiler/nodes/InliningLog.java | /*
* Copyright (c) 2018, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package jdk.graal.compiler.nodes;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.BiConsumer;
import org.graalvm.collections.EconomicMap;
import org.graalvm.collections.Equivalence;
import org.graalvm.collections.MapCursor;
import org.graalvm.collections.UnmodifiableEconomicMap;
import jdk.graal.compiler.debug.Assertions;
import jdk.graal.compiler.debug.GraalError;
import jdk.graal.compiler.graph.Node;
import jdk.graal.compiler.nodes.java.MethodCallTargetNode;
import jdk.graal.compiler.util.CollectionsUtil;
import jdk.vm.ci.meta.JavaTypeProfile;
import jdk.vm.ci.meta.MetaUtil;
import jdk.vm.ci.meta.ResolvedJavaMethod;
/**
* This class contains all inlining decisions performed on a graph during the compilation.
*
* Each inlining decision consists of:
*
* <ul>
* <li>a value indicating whether the decision was positive or negative</li>
* <li>the call target method</li>
* <li>the reason for the inlining decision</li>
* <li>the name of the phase in which the inlining decision took place</li>
* <li>the inlining log of the inlined graph, or {@code null} if the decision was negative</li>
* </ul>
*
* A phase that does inlining should use the instance of this class contained in the
* {@link StructuredGraph} by calling {@link #addDecision} whenever it decides to inline a method.
* If there are invokes in the graph at the end of the respective phase, then that phase must call
* {@link #addDecision} to log negative decisions.
*/
public class InliningLog {
private static final String TREE_NODE = "\u251c\u2500\u2500";
private static final String LAST_TREE_NODE = "\u2514\u2500\u2500";
/**
 * A single inlining decision recorded for a callsite: whether it was positive, the textual
 * reason, the phase that made it, and the target method it concerned.
 */
public static final class Decision {
    private final boolean positive;
    private final String reason;
    private final String phase;
    private final ResolvedJavaMethod target;

    private Decision(boolean positive, String reason, String phase, ResolvedJavaMethod target) {
        this.positive = positive;
        this.reason = reason;
        this.phase = phase;
        this.target = target;
    }

    /** Returns {@code true} iff this decision was to inline. */
    public boolean isPositive() {
        return positive;
    }

    /** Returns the textual reason recorded for this decision. */
    public String getReason() {
        return reason;
    }

    @Override
    public String toString() {
        String targetName = target != null ? target.format("%H.%n(%p)") : "";
        String verdict = positive ? "yes" : "no";
        return String.format("<%s> %s: %s, %s", phase, targetName, verdict, reason);
    }
}
/**
* A call-tree node with inlining decisions. The root callsite represents the root compiled
* method. Non-root nodes represent invokes in inlined methods' bodies.
*/
public static final class Callsite {
/**
* A special bci for the root method's callsite.
*/
private static final int ROOT_CALLSITE_BCI = -1;
/**
* The list of inlining decisions made about this callsite.
*/
private final List<Decision> decisions;
/**
* The list of callsites in the inlined body of the target method.
*/
private final List<Callsite> children;
/**
* The callsite whose inlined body contains this callsite. The value is {@code null} for the
* root callsite.
*/
private Callsite parent;
/**
* The invoke associated with the callsite. The value is {@code null} for the root callsite.
*
* For non-root nodes, {@link Invokable#getTargetMethod()} is used to obtain the
* {@link #target target method}. However, the target method might change in the lifetime of
* the node. Thus, the target method must be (1) initialized at the time the callsite is
* created and (2) updated at the time {@link #addDecision a decision is made}.
*
* The invoke is also lost (the value is {@code null}) when it is removed and
* {@link #copyTree copied}.
*/
private Invokable invoke;
/**
* The target method of the callsite. This field should reflect the correct target method at
* the end of a compilation.
*/
private ResolvedJavaMethod target;
/**
* The bci of the invoke. The value is {@link #ROOT_CALLSITE_BCI} for the root method's
* callsite. For other callsites, we remember their {@link Invokable#bci() invoke's bci}
* because the invoke might be lost when it is removed.
*
* @see #copyTree(Callsite, Callsite, UnmodifiableEconomicMap, EconomicMap)
*/
private final int bci;
/**
* {@code true} if the call was known to be indirect at the time of the last inlining
* decision (or at the time the call-tree node was created if there was no inlining
* decision).
*/
private boolean indirect;
/**
* The original callsite holding the invoke from which this invoke was originally duplicated
* or {@code null}.
*
* If this field is set, the optimization log interprets it as the true parent node
* overriding the {@link #parent} field. This allows us to build a slightly different tree
* in the optimization log while preserving the behavior of {@link #positionString()} and
* {@link #formatAsTree}.
*
* It must hold that the original callsite (the value of this field) precedes this node in
* the preorder traversal of the call tree. This property simplifies the construction of the
* modified tree in the optimization log.
*/
private final Callsite originalCallsite;
/**
* The last non-null type profile of the call target or {@code null}. The value is updated
* on each inlining decision.
*/
private JavaTypeProfile targetTypeProfile;
/**
 * Creates a call-tree node and appends it to the parent's list of children (if any).
 *
 * @param parent the callsite whose inlined body contains this one, or {@code null} for the root
 * @param originalCallsite the callsite this one was duplicated from, or {@code null}
 * @param invoke the invoke backing this callsite, or {@code null} for the root
 * @param target the target method at the time of creation
 * @param bci the invoke's bci, or {@link #ROOT_CALLSITE_BCI} for the root
 * @param indirect whether the call is known to be indirect at the time of creation
 * @param targetTypeProfile the type profile of the call target, or {@code null}
 */
Callsite(Callsite parent, Callsite originalCallsite, Invokable invoke, ResolvedJavaMethod target, int bci, boolean indirect, JavaTypeProfile targetTypeProfile) {
    this.parent = parent;
    this.bci = bci;
    this.indirect = indirect;
    this.decisions = new ArrayList<>();
    this.children = new ArrayList<>();
    this.invoke = invoke;
    this.target = target;
    this.originalCallsite = originalCallsite;
    this.targetTypeProfile = targetTypeProfile;
    if (parent != null) {
        // Link this node into the tree eagerly so the parent's child list is always complete.
        parent.children.add(this);
    }
}
/**
 * Adds an inlining decision, updates the target method, the type profile, and the indirect
 * field.
 *
 * @param decision the decision to be added
 */
private void addDecision(Decision decision) {
    decisions.add(decision);
    // The decision carries the target method as known at decision time; keep the field fresh.
    target = decision.target;
    // Only negative decisions can leave the invoke in place, so indirectness is only
    // meaningful (and recomputed) for them.
    indirect = !decision.positive && invokeIsIndirect(invoke);
    // Keep the last non-null type profile; never overwrite it with null.
    JavaTypeProfile newTypeProfile = targetTypeProfile(invoke);
    if (newTypeProfile != null) {
        targetTypeProfile = newTypeProfile;
    }
}
/**
 * Tests whether the given invokable is an {@link Invoke} whose call target is indirect.
 *
 * @param invokable an invokable (may be a placeholder)
 * @return {@code true} iff the invokable is an {@link Invoke} with a non-null, indirect call
 *         target
 */
private static boolean invokeIsIndirect(Invokable invokable) {
    if (invokable instanceof Invoke) {
        CallTargetNode callTargetNode = ((Invoke) invokable).callTarget();
        return callTargetNode != null && callTargetNode.invokeKind.isIndirect();
    }
    return false;
}
/**
 * Extracts the type profile from the call target of the given invokable, if available.
 *
 * @param invokable an invokable
 * @return the type profile of the {@link MethodCallTargetNode}, or {@code null} if the
 *         invokable is not an {@link Invoke} or its call target is of a different kind
 */
private static JavaTypeProfile targetTypeProfile(Invokable invokable) {
    if (invokable instanceof Invoke) {
        CallTargetNode callTarget = ((Invoke) invokable).callTarget();
        if (callTarget instanceof MethodCallTargetNode) {
            return ((MethodCallTargetNode) callTarget).getTypeProfile();
        }
    }
    return null;
}
/**
* Creates and adds a child call-tree node (callsite) to this node.
*
* @param childInvoke the invoke which represents the child callsite to be added
* @param childOriginalCallsite the original callsite from which the child invoke was
* duplicated (if any)
* @return the created callsite for the child
*/
private Callsite addChild(Invokable childInvoke, Callsite childOriginalCallsite) {
return new Callsite(this, childOriginalCallsite, childInvoke, childInvoke.getTargetMethod(), childInvoke.bci(), invokeIsIndirect(childInvoke), targetTypeProfile(childInvoke));
}
/**
 * Returns a human-readable description of this callsite's position: for the root, the
 * compiled method; for other callsites, the location within the caller (parent) method.
 */
public String positionString() {
    if (parent == null) {
        // Root callsite: describe the compiled method itself, falling back to the invoke's
        // target name or "unknown method" if the target is not available.
        if (target != null) {
            return "compilation of " + target.format("%H.%n(%p)");
        } else if (invoke != null && invoke.getTargetMethod() != null) {
            return "compilation of " + invoke.getTargetMethod().getName() + "(bci: " + getBci() + ")";
        } else {
            return "unknown method (bci: " + getBci() + ")";
        }
    }
    // Non-root: the position is expressed relative to the caller (parent) method's bci.
    String position;
    if (parent.target != null) {
        position = MetaUtil.appendLocation(new StringBuilder(100), parent.target, getBci()).toString();
    } else if (invoke != null && invoke.getTargetMethod() != null) {
        position = invoke.getTargetMethod().getName() + "(bci: " + getBci() + ")";
    } else {
        position = "unknown method (bci: " + getBci() + ")";
    }
    return "at " + position;
}
/**
* Gets the list of inlining decisions made about this callsite.
*/
public List<Decision> getDecisions() {
return decisions;
}
/**
* Gets the list of callsites in the inlined body of the target method.
*/
public List<Callsite> getChildren() {
return children;
}
/**
* Gets the callsite whose inlined body contains this callsite. Returns {@code null} for the
* root callsite.
*
* @return the parent callsite
*/
public Callsite getParent() {
return parent;
}
/**
* Gets the invoke associated with the callsite. Returns {@code null} for the root callsite.
* Might return {@code null} if the invoke was removed.
*
* @return the invoke associated with the callsite
*/
public Invokable getInvoke() {
return invoke;
}
/**
* Updates the invoke associated with the callsite.
*
* @param newInvoke the new invoke
*/
public void setInvoke(Invokable newInvoke) {
invoke = newInvoke;
}
/**
* Gets the target method of the callsite. The target is correct at the end of the
* compilation.
*
* @return the target method of the callsite
*/
public ResolvedJavaMethod getTarget() {
return target;
}
/**
* Gets the parent callsite, which may be overridden by {@link #originalCallsite} if it set.
*
* The optimization log interprets the call-tree node returned by this method as the parent
* of this node. This allows it to build a slightly different call-tree while preserving the
* behavior of {@link #positionString()} and {@link #formatAsTree}.
*
* @return the parent callsite (overridable by {@link #originalCallsite})
*/
public Callsite getOverriddenParent() {
return originalCallsite == null ? parent : originalCallsite;
}
/**
* Gets the original callsite, which overrides the parent callsite when it is set.
*
* @see #getOverriddenParent()
* @return the original callsite
*/
public Callsite getOriginalCallsite() {
return originalCallsite;
}
/**
* Gets the bci of the invoke. Returns {@link #ROOT_CALLSITE_BCI} for the root callsite.
*
* @return the bci of the invoke
*/
public int getBci() {
return bci;
}
/**
* Returns {@code true} if the call was known to be indirect at the time of the last
* inlining decision (or at the time the call-tree node was created if there was no inlining
* decision).
*
* @return {@code true} if the call was known to be indirect
*/
public boolean isIndirect() {
return indirect;
}
/**
* Returns the last non-null type profile of the call target or {@code null}.
*/
public JavaTypeProfile getTargetTypeProfile() {
return targetTypeProfile;
}
/**
* Returns {@code true} if the callsite is inlined.
*/
public boolean isInlined() {
return CollectionsUtil.anyMatch(decisions, InliningLog.Decision::isPositive);
}
}
private Callsite root;
private final EconomicMap<Invokable, Callsite> leaves;
/**
 * Creates an empty inlining log whose root callsite represents the compilation of the given
 * method.
 *
 * @param rootMethod the root method of the compiled graph
 */
public InliningLog(ResolvedJavaMethod rootMethod) {
    this.root = new Callsite(null, null, null, rootMethod, Callsite.ROOT_CALLSITE_BCI, false, null);
    this.leaves = EconomicMap.create();
}
/**
 * Add an inlining decision for the specified invoke.
 *
 * An inlining decision can be either positive or negative. A positive inlining decision must be
 * logged after replacing an {@link Invoke} with a graph. In this case, the node replacement map
 * and the {@link InliningLog} of the inlined graph must be provided.
 *
 * @param invoke the invoke the decision is about; must currently be a leaf callsite
 * @param positive whether the invoke was inlined
 * @param phase the name of the phase making the decision
 * @param replacements mapping from callee graph nodes to their duplicates in this graph;
 *            for positive decisions must be non-null iff {@code calleeLog} is non-null
 * @param calleeLog the inlining log of the inlined graph, or {@code null}
 * @param inlineeMethod the target method the decision concerns
 * @param reason a {@link String#format} pattern describing the reason for the decision
 * @param args arguments for the reason pattern
 */
void addDecision(Invokable invoke, boolean positive, String phase, EconomicMap<Node, Node> replacements, InliningLog calleeLog, ResolvedJavaMethod inlineeMethod, String reason, Object... args) {
    assert leaves.containsKey(invoke) : invoke;
    // For positive decisions, replacements and calleeLog must be both present or both absent.
    assert !positive || Objects.isNull(replacements) == Objects.isNull(calleeLog) : Assertions.errorMessage(positive, replacements, calleeLog);
    Callsite callsite = leaves.get(invoke);
    callsite.addDecision(new Decision(positive, String.format(reason, args), phase, inlineeMethod));
    if (positive) {
        // The invoke has been inlined; it is no longer a leaf of this log.
        leaves.removeKey(invoke);
        if (calleeLog == null) {
            return;
        }
        // Graft a copy of the callee's call tree below the inlined callsite.
        EconomicMap<Callsite, Callsite> mapping = EconomicMap.create(Equivalence.IDENTITY_WITH_SYSTEM_HASHCODE);
        for (Callsite calleeChild : calleeLog.root.children) {
            copyTree(callsite, calleeChild, replacements, mapping);
        }
        // The callee's surviving leaf invokes (as duplicated into this graph) become leaves
        // of this log.
        MapCursor<Invokable, Callsite> entries = calleeLog.leaves.getEntries();
        while (entries.advance()) {
            FixedNode invokeFromCallee = entries.getKey().asFixedNodeOrNull();
            Callsite callsiteFromCallee = entries.getValue();
            if (invokeFromCallee == null || invokeFromCallee.isDeleted()) {
                // Some invoke nodes could have been removed by optimizations.
                continue;
            }
            Invokable inlinedInvokeFromCallee = (Invokable) replacements.get(invokeFromCallee);
            Callsite descendant = mapping.get(callsiteFromCallee);
            leaves.put(inlinedInvokeFromCallee, descendant);
        }
    }
}
/**
 * Appends the inlining decision tree from {@code replacementLog} to this log.
 *
 * This is called for example when a node in a graph is replaced with a snippet.
 *
 * @param replacements mapping from nodes of the replacement graph to their duplicates in
 *            this graph
 * @param replacementLog if non-null, its subtrees are appended below the root of this log.
 * @see InliningLog#addDecision
 */
public void addLog(UnmodifiableEconomicMap<Node, Node> replacements, InliningLog replacementLog) {
    if (replacementLog != null) {
        // Copy the replacement log's call tree below this log's root.
        EconomicMap<Callsite, Callsite> mapping = EconomicMap.create(Equivalence.IDENTITY_WITH_SYSTEM_HASHCODE);
        for (Callsite calleeChild : replacementLog.root.children) {
            copyTree(root, calleeChild, replacements, mapping);
        }
        // Re-register the replacement log's surviving leaf invokes under their duplicates.
        MapCursor<Invokable, Callsite> entries = replacementLog.leaves.getEntries();
        while (entries.advance()) {
            FixedNode replacementInvoke = entries.getKey().asFixedNodeOrNull();
            Callsite replacementCallsite = entries.getValue();
            if (replacementInvoke == null || replacementInvoke.isDeleted()) {
                // Some invoke nodes could have been removed by optimizations.
                continue;
            }
            Invokable invoke = (Invokable) replacements.get(replacementInvoke);
            Callsite callsite = mapping.get(replacementCallsite);
            leaves.put(invoke, callsite);
        }
    }
}
/**
 * Completely replace this log's contents with a copy of {@code replacementLog}'s contents.
 *
 * The precondition is that this inlining log is completely empty. This is usually called as
 * part of graph copying.
 *
 * @param replacements mapping from nodes of the replacement graph to their duplicates in
 *            this graph
 * @param replacementLog the log whose call tree and leaves are copied into this log
 * @see InliningLog#addDecision
 */
public void replaceLog(UnmodifiableEconomicMap<Node, Node> replacements, InliningLog replacementLog) {
    // This log must be completely empty before the copy.
    assert root.decisions.isEmpty();
    assert root.children.isEmpty();
    assert leaves.isEmpty();
    EconomicMap<Callsite, Callsite> mapping = EconomicMap.create(Equivalence.IDENTITY_WITH_SYSTEM_HASHCODE);
    root = copyTree(null, replacementLog.root, replacements, mapping);
    MapCursor<Invokable, Callsite> replacementEntries = replacementLog.leaves.getEntries();
    while (replacementEntries.advance()) {
        FixedNode replacementInvoke = replacementEntries.getKey().asFixedNodeOrNull();
        Callsite replacementSite = replacementEntries.getValue();
        // Only re-register leaves whose invoke is still alive in the replacement graph.
        if (replacementInvoke != null && replacementInvoke.isAlive()) {
            Invokable invoke = (Invokable) replacements.get(replacementInvoke);
            Callsite site = mapping.get(replacementSite);
            leaves.put(invoke, site);
        }
    }
}
/**
 * Recursively copies a call tree and adds it to this call tree.
 *
 * @param parent the call-tree node which will hold the copy ({@code null} if the copy replaces
 *            the root)
 * @param replacementSite the root of the call tree to be copied
 * @param replacements the mapping from original graph nodes to replaced nodes
 * @param mapping the mapping from original call-tree nodes to copies; updated by this call
 * @return the root of the copied subtree
 */
private static Callsite copyTree(Callsite parent, Callsite replacementSite, UnmodifiableEconomicMap<Node, Node> replacements, EconomicMap<Callsite, Callsite> mapping) {
    // Translate the invoke through the replacement map if it is still alive; otherwise the
    // copy loses its invoke (see the documentation of Callsite#invoke).
    Invokable invoke = null;
    if (replacementSite.invoke != null) {
        FixedNode replacementSiteInvoke = replacementSite.invoke.asFixedNodeOrNull();
        if (replacementSiteInvoke != null && replacementSiteInvoke.isAlive()) {
            invoke = (Invokable) replacements.get(replacementSiteInvoke);
        }
    }
    // An original callsite precedes its duplicates in preorder (see Callsite#originalCallsite),
    // so by the time we get here its copy is already present in the mapping.
    Callsite originalCallsite = replacementSite.originalCallsite == null ? null : mapping.get(replacementSite.originalCallsite);
    Callsite site = new Callsite(parent, originalCallsite, invoke, replacementSite.target, replacementSite.bci, replacementSite.indirect, replacementSite.targetTypeProfile);
    site.decisions.addAll(replacementSite.decisions);
    mapping.put(replacementSite, site);
    for (Callsite replacementChild : replacementSite.children) {
        copyTree(site, replacementChild, replacements, mapping);
    }
    return site;
}
public void checkInvariants(StructuredGraph graph) {
if (!Assertions.assertionsEnabled()) {
return;
}
for (Invoke invoke : graph.getInvokes()) {
assert leaves.containsKey(invoke) : "Invoke " + invoke + " not contained in the leaves.";
}
checkParentPointers();
}
private void checkParentPointers() {
assert root.parent == null : "Non-null parent of root: " + root.parent;
if (Assertions.assertionsEnabled()) {
checkParentPointers(root);
}
}
private static void checkParentPointers(Callsite site) {
for (Callsite child : site.children) {
assert site == child.parent : "Callsite " + site + " with child " + child + " has an invalid parent pointer " + child.parent;
checkParentPointers(child);
}
}
private final UpdateScope noUpdates = new UpdateScope((oldNode, newNode) -> {
});
private UpdateScope currentUpdateScope = null;
/**
* Used to designate scopes in which {@link Invokable} registration or cloning should be handled
* differently.
*/
public final class UpdateScope implements AutoCloseable {
private final BiConsumer<Invokable, Invokable> updater;
private UpdateScope(BiConsumer<Invokable, Invokable> updater) {
this.updater = updater;
}
public void activate() {
if (currentUpdateScope != null) {
throw GraalError.shouldNotReachHere("InliningLog updating already set."); // ExcludeFromJacocoGeneratedReport
}
currentUpdateScope = this;
}
@Override
public void close() {
assert currentUpdateScope != null;
currentUpdateScope = null;
}
public BiConsumer<Invokable, Invokable> getUpdater() {
return updater;
}
}
/**
 * Returns the updater of the currently active {@link UpdateScope}, or {@code null} if no
 * update scope is active.
 */
public BiConsumer<Invokable, Invokable> getUpdateScope() {
    return currentUpdateScope == null ? null : currentUpdateScope.getUpdater();
}
/**
* Creates and sets a new update scope for the log.
*
* The specified {@code updater} is invoked when an {@link Invokable} node is registered or
* cloned. If the node is newly registered, then the first argument to the {@code updater} is
* {@code null}. If the node is cloned, then the first argument is the node it was cloned from.
*
* @param updater an operation taking a null (or the original node), and the registered (or
* cloned) {@link Invokable}
* @return a bound {@link UpdateScope} object, or a {@code null} if tracing is disabled
*/
public UpdateScope openUpdateScope(BiConsumer<Invokable, Invokable> updater) {
UpdateScope scope = new UpdateScope(updater);
scope.activate();
return scope;
}
/**
* Creates a new update scope that does not update {@code log}.
*
* This update scope will not add a newly created {@code Invokable} to the log, nor will it
* amend its position if it was cloned. Instead, users need to update the inlining log with the
* new {@code Invokable} on their own.
*
* @see #openUpdateScope
*/
public static UpdateScope openDefaultUpdateScope(InliningLog log) {
if (log == null) {
return null;
}
log.noUpdates.activate();
return log.noUpdates;
}
/**
* Opens a new update scope that registers callsites for duplicated invokes and sets the
* {@link Callsite#originalCallsite} of the duplicated callsite to the original callsite (the
* callsite of the invoke from which it is duplicated).
*
* @return a bound {@link UpdateScope} or {@code null} if the log is disabled
*/
public static UpdateScope openUpdateScopeTrackingOriginalCallsites(InliningLog inliningLog) {
if (inliningLog == null) {
return null;
}
return inliningLog.openUpdateScope((originalInvoke, newInvoke) -> {
if (originalInvoke != null) {
assert !inliningLog.containsLeafCallsite(newInvoke);
inliningLog.trackDuplicatedCallsite(originalInvoke, newInvoke, inliningLog.leaves.get(originalInvoke));
}
});
}
/**
* Opens a new update scope tracking the replacement of an invoke. Exactly one invoke must be
* registered when this scope is active, which becomes the replacement invoke. The method
* associates the {@link Callsite} of the replaced invoke with the replacement invoke,
* preserving all inlining decisions. It is a responsibility of the caller to delete the
* replaced invoke.
*
* If the log is in a {@link #openDefaultUpdateScope default} update scope (i.e., updates to
* invokes are handled by the user), this call has no effect.
*
* @param inliningLog the inlining log or {@code null} if it disabled
* @param replacedInvoke the invoke that is getting replaced
*
* @return a bound {@link UpdateScope} or {@code null} if the log is disabled
*/
public static UpdateScope openUpdateScopeTrackingReplacement(InliningLog inliningLog, Invokable replacedInvoke) {
if (inliningLog == null || inliningLog.currentUpdateScope == inliningLog.noUpdates) {
return null;
}
return inliningLog.openUpdateScope((nullInvoke, replacementInvoke) -> {
assert nullInvoke == null;
assert !inliningLog.leaves.containsKey(replacementInvoke);
Callsite callsite = inliningLog.leaves.get(replacedInvoke);
callsite.invoke = replacementInvoke;
inliningLog.leaves.removeKey(replacedInvoke);
inliningLog.leaves.put(replacementInvoke, callsite);
});
}
private RootScope currentRootScope = null;
/**
* Used to change the current effective root of the method being compiled.
*
* This root scope is used in situations in which a phase does its own ad-hoc inlining, in which
* it replaces an Invoke with other nodes, some of which may be other Invokes. The prime example
* for this is the bytecode parser, which does not create separate graphs with their own
* inlining logs when inlining an Invoke, but instead continues recursively parsing the graph
* corresponding to the Invoke.
*
* Root scopes can be nested.
*
* @see #openRootScope
*/
public final class RootScope implements AutoCloseable {
private final RootScope parent;
private final Callsite replacementRoot;
public RootScope(RootScope parent, Callsite replacementRoot) {
this.parent = parent;
this.replacementRoot = replacementRoot;
}
void activate() {
currentRootScope = this;
}
public Invokable getInvoke() {
return replacementRoot.invoke;
}
@Override
public void close() {
assert currentRootScope != null;
unregisterLeafCallsite(replacementRoot.invoke);
currentRootScope = parent;
}
}
public static final class PlaceholderInvokable implements Invokable {
private final int bci;
private final ResolvedJavaMethod callerMethod;
private final ResolvedJavaMethod method;
public PlaceholderInvokable(ResolvedJavaMethod callerMethod, ResolvedJavaMethod method, int bci) {
this.callerMethod = callerMethod;
this.method = method;
this.bci = bci;
}
@Override
public ResolvedJavaMethod getTargetMethod() {
return method;
}
@Override
public int bci() {
return bci;
}
@Override
public void setBci(int bci) {
GraalError.unimplementedOverride(); // ExcludeFromJacocoGeneratedReport
}
@Override
public FixedNode asFixedNodeOrNull() {
return null;
}
@Override
public ResolvedJavaMethod getContextMethod() {
return callerMethod;
}
@Override
public int hashCode() {
return Integer.hashCode(bci) ^ callerMethod.hashCode() ^ method.hashCode();
}
@Override
public boolean equals(Object obj) {
if (obj instanceof PlaceholderInvokable) {
final PlaceholderInvokable that = (PlaceholderInvokable) obj;
return that.bci == bci && that.method.equals(method) && that.callerMethod.equals(callerMethod);
}
return false;
}
@Override
public String toString() {
return String.format("Invokable(caller: %s, bci: %d, method: %s)", callerMethod.format("%H.%n"), bci, method.format("%H.%n"));
}
}
public RootScope openRootScope(ResolvedJavaMethod callerMethod, ResolvedJavaMethod target, int bci) {
return openRootScope(new PlaceholderInvokable(callerMethod, target, bci));
}
public RootScope openRootScope(Invokable invoke) {
if (!leaves.containsKey(invoke)) {
// Create the invoke if it was not added to the graph yet.
trackNewCallsite(invoke);
}
RootScope scope = new RootScope(currentRootScope, leaves.get(invoke));
scope.activate();
return scope;
}
public boolean containsLeafCallsite(Invokable invokable) {
return leaves.containsKey(invokable);
}
/**
* Removes a callsite from the set of leaf callsites. The callsite is not removed from the call
* tree. The callsite can be {@link #registerLeafCallsite re-registered} later.
*
* @param invokable the invoke representing the callsite
* @return the unregistered callsite
*/
public Callsite unregisterLeafCallsite(Invokable invokable) {
return leaves.removeKey(invokable);
}
/**
* Registers a callsite as a leaf callsite. This method must be called during graph compression,
* or other node-id changes.
*
* @param invokable the invoke representing the callsite
* @param callsite the callsite to be registered
*/
public void registerLeafCallsite(Invokable invokable, Callsite callsite) {
leaves.put(invokable, callsite);
}
/**
* Removes a leaf callsite from the call tree. This implies {@link #unregisterLeafCallsite
* unregistering} it from the set of leaves.
*
* @param invokable the invoke representing the callsite
*/
public void removeLeafCallsite(Invokable invokable) {
Callsite callsite = unregisterLeafCallsite(invokable);
assert callsite != null : "it must be a leaf callsite";
assert callsite.parent != null : "a leaf callsite must have a parent";
callsite.parent.children.remove(callsite);
}
/**
 * Registers a brand-new invoke as a leaf callsite under the current effective root (the
 * innermost open {@link RootScope}'s root, or the log's root).
 *
 * @param invoke the newly created invoke; must not already be a leaf callsite
 */
public void trackNewCallsite(Invokable invoke) {
    assert !leaves.containsKey(invoke);
    Callsite currentRoot = findCurrentRoot();
    Callsite callsite = currentRoot.addChild(invoke, null);
    leaves.put(invoke, callsite);
}
/**
 * Returns the effective root callsite: the replacement root of the innermost open
 * {@link RootScope}, or this log's root if no root scope is active.
 */
private Callsite findCurrentRoot() {
    if (currentRootScope == null) {
        return root;
    }
    return currentRootScope.replacementRoot;
}
/**
 * Registers a callsite for an invoke duplicated from the sibling's invoke. The new callsite
 * is added under the sibling's parent and inherits a copy of the sibling's decisions.
 *
 * @param sibling the invoke from which {@code newInvoke} was duplicated; must be a leaf
 * @param newInvoke the duplicated invoke to register
 * @param childOriginalCallsite the original callsite to record on the duplicate, or {@code null}
 */
public void trackDuplicatedCallsite(Invokable sibling, Invokable newInvoke, Callsite childOriginalCallsite) {
    Callsite siblingCallsite = leaves.get(sibling);
    Callsite parentCallsite = siblingCallsite.parent;
    Callsite callsite = parentCallsite.addChild(newInvoke, childOriginalCallsite);
    callsite.decisions.addAll(siblingCallsite.decisions);
    leaves.put(newInvoke, callsite);
}
/**
 * Formats the inlining log as a hierarchical tree.
 *
 * @param nullIfEmpty specifies whether null should be returned if there are no inlining
 *            decisions
 * @return the tree representation of the inlining log
 */
public String formatAsTree(boolean nullIfEmpty) {
    // The root never carries decisions, and leaves only exist when the root has children.
    assert root.decisions.isEmpty();
    assert !root.children.isEmpty() || leaves.isEmpty();
    if (nullIfEmpty && root.children.isEmpty()) {
        return null;
    }
    StringBuilder builder = new StringBuilder(512);
    formatAsTree(root, "", builder);
    return builder.toString();
}
/**
 * Recursively appends the textual tree representation of {@code site} and its children.
 *
 * @param site the callsite subtree to format
 * @param indent the indentation prefix for this depth
 * @param builder the output buffer
 */
private static void formatAsTree(Callsite site, String indent, StringBuilder builder) {
    String position = site.positionString();
    builder.append(indent).append(position).append(": ");
    if (site.decisions.isEmpty()) {
        // The root (parent == null) legitimately has no decisions; only annotate non-roots.
        if (site.parent != null) {
            builder.append("(no decisions made about ").append(site.target != null ? site.target.format("%H.%n(%p)") : "callee").append(")");
        }
        builder.append(System.lineSeparator());
    } else if (site.decisions.size() == 1) {
        builder.append(site.decisions.get(0).toString());
        builder.append(System.lineSeparator());
    } else {
        builder.append(System.lineSeparator());
        // Hoist the last-decision lookup out of the loop and append pieces directly instead
        // of concatenating intermediate strings inside StringBuilder#append.
        Decision lastDecision = site.decisions.get(site.decisions.size() - 1);
        for (Decision decision : site.decisions) {
            String node = decision == lastDecision ? LAST_TREE_NODE : TREE_NODE;
            builder.append(indent).append(" ").append(node).append(decision.toString());
            builder.append(System.lineSeparator());
        }
    }
    for (Callsite child : site.children) {
        formatAsTree(child, indent + " ", builder);
    }
}
/**
* Gets the callsite representing the root method.
*/
public Callsite getRootCallsite() {
return root;
}
/**
* Sets the call tree to the provided call tree.
*
* @param root the root of the new call tree
*/
public void setRootCallsite(Callsite root) {
this.root = root;
}
/**
 * Adds a positive inlining decision and transfers the inlining log of the callee to this log.
 * The inlining log of the callee must be associated with the same graph. The inlining log of
 * the callee is cleared and should not be used anymore. The target of the callsite is updated
 * using the provided call target.
 *
 * @param invoke the inlined invoke
 * @param callTargetNode the call target of the invoke
 * @param calleeLog the inlining log of the callee
 * @param phase the phase which performed the decision
 * @param reason the reason for the inlining decision
 */
public void inlineByTransfer(Invokable invoke, CallTargetNode callTargetNode, InliningLog calleeLog, String phase, String reason) {
    Callsite caller = leaves.get(invoke);
    caller.addDecision(new Decision(true, reason, phase, invoke.getTargetMethod()));
    // Refresh the target and indirectness from the provided call target.
    caller.target = callTargetNode.targetMethod;
    caller.indirect = callTargetNode.invokeKind.isIndirect();
    // Re-parent the callee's call tree (not a copy) under the caller's callsite.
    for (Callsite child : calleeLog.getRootCallsite().children) {
        caller.children.add(child);
        child.parent = caller;
    }
    // The inlined invoke stops being a leaf; the callee's leaves become this log's leaves.
    leaves.removeKey(invoke);
    leaves.putAll(calleeLog.leaves);
    // Clear the callee log so that accidental reuse is detectable.
    calleeLog.root.children.clear();
    calleeLog.leaves.clear();
    checkParentPointers();
}
}
|
apache/hive | 36,664 | standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/PartitionProjectionEvaluator.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.regex.Pattern;
import static org.apache.hadoop.hive.metastore.MetastoreDirectSqlUtils.extractSqlLong;
/**
* Evaluator for partition projection filters which specify parts of the partition that should be
* used using dot notation for fields.
*/
public class PartitionProjectionEvaluator {
  private static final Logger LOG = LoggerFactory.getLogger(PartitionProjectionEvaluator.class);
  // When true, null values inside parameter maps are converted to empty strings
  // (for backwards compatibility with clients that cannot handle nulls).
  private final boolean convertMapNullsToEmptyStrings;
  // True when the table is a view; views carry no storage descriptor, so all
  // sd.* projection fields are dropped during parsing.
  private final boolean isView;
  // Optional regex patterns restricting which partition parameter keys are fetched.
  private final String includeParamKeyPattern;
  private final String excludeParamKeyPattern;
  // The expanded and validated set of requested projection field names (dot notation).
  private Set<String> projectionFields;
  /**
   * Callback used while walking the field-node tree to set a single-valued
   * field value on a partially constructed partition.
   */
  interface PartitionFieldValueSetter<T> {
    void setValue(T part, PartitionFieldNode node, Object value) throws MetaException;
  }
  // Maps each multi-valued projection field to the setter that knows how to fetch
  // and populate it (multi-valued fields need separate queries against side tables).
  private final ImmutableMap<String, MutivaluedFieldSetter> multiValuedFieldSetters =
      new ImmutableMap.Builder<String, MutivaluedFieldSetter>()
          .put("values", new PartitionValuesSetter())
          .put("parameters", new PartitionParametersSetter())
          .put("sd.cols", new PartitionSDColsSetter())
          .put("sd.bucketCols", new PartitionSDBucketColsSetter())
          .put("sd.sortCols", new PartitionSortColsSetter())
          .put("sd.parameters", new PartitionSDParametersSetter())
          .put("sd.serdeInfo.parameters", new PartitionSerdeInfoParametersSetter())
          .put("sd.skewedInfo.skewedColNames", new PartitionSkewedColsNamesSetter())
          .put("sd.skewedInfo.skewedColValues", new PartitionSkewedColsValuesSetter())
          .put("sd.skewedInfo.skewedColValueLocationMaps",
              new PartitionSkewedColValLocationMapSetter()).build();
  // Synthetic id columns that are always (or conditionally) added to the SQL projection
  // so that rows can be joined back to the in-memory Partition/SD/SerDe/CD objects.
  private static final String PART_ID = "PART_ID";
  private static final String SD_ID = "SD_ID";
  private static final String SERDE_ID = "SERDE_ID";
  private static final String CD_ID = "CD_ID";
  private static final PartitionFieldNode partIdNode = new PartitionFieldNode(PART_ID);
  private static final PartitionFieldNode sdIdNode = new PartitionFieldNode(SD_ID);
  private static final PartitionFieldNode serdeIdNode = new PartitionFieldNode(SERDE_ID);
  private static final PartitionFieldNode cdIdNode = new PartitionFieldNode(CD_ID);
  // Maps projection field names (and special *_TABLE_NAME keys) to physical SQL
  // column/table names; supplied by the caller so quoting/schema prefixes are handled there.
  private final ImmutableMap<String, String> fieldNameToTableName;
  // Roots of the parsed projection-field tree (e.g. "sd" is the root of "sd.location").
  private final Set<PartitionFieldNode> roots;
  // Resolved (possibly schema-qualified) table names used when building SQL text.
  private final String PARTITIONS;
  private final String SDS;
  private final String SERDES;
  private final String PARTITION_PARAMS;
  private final PersistenceManager pm;
  // Patterns matching field names that require joining the SDS / SERDES / COLUMNS tables.
  @VisibleForTesting static final String SD_PATTERN = "sd|sd\\.";
  @VisibleForTesting static final String SERDE_PATTERN = "sd\\.serdeInfo|sd\\.serdeInfo\\.";
  @VisibleForTesting static final String CD_PATTERN = "sd\\.cols|sd\\.cols\\.";
  // Slots in the per-row ids[] scratch array used by setSingleValuedFields.
  private static final int SD_INDEX = 0;
  private static final int CD_INDEX = 1;
  private static final int SERDE_INDEX = 2;
  private static final int PART_INDEX = 3;
  // this map stores all the single valued fields in the Partition class and maps them to the corresponding
  // single-valued fields from the MPartition class. This map is used to parse the given partition fields
  // as well as to convert a given partition field list to a JDO setResult string when direct-SQL
  // is disabled
  private static final ImmutableMap<String, String> allPartitionSingleValuedFields = new ImmutableMap.Builder<String, String>()
      .put("dbName", "table.database.name")
      .put("tableName", "table.tableName")
      .put("createTime", "createTime")
      .put("lastAccessTime", "lastAccessTime")
      .put("sd.location", "sd.location")
      .put("sd.inputFormat", "sd.inputFormat")
      .put("sd.outputFormat", "sd.outputFormat")
      .put("sd.compressed", "sd.isCompressed")
      .put("sd.numBuckets", "sd.numBuckets")
      .put("sd.serdeInfo.name", "sd.serDeInfo.name")
      .put("sd.serdeInfo.serializationLib", "sd.serDeInfo.serializationLib")
      .put("sd.serdeInfo.description", "sd.serDeInfo.description")
      .put("sd.serdeInfo.serializerClass", "sd.serDeInfo.serializerClass")
      .put("sd.serdeInfo.deserializerClass", "sd.serDeInfo.deserializerClass")
      .put("sd.serdeInfo.serdeType", "sd.serDeInfo.serdeType")
      .put("catName", "table.database.catalogName")
      .put("writeId", "writeId")
      //TODO there is no mapping for isStatsCompliant to JDO MPartition
      //.put("isStatsCompliant", "isStatsCompliant")
      .build();
  // All multi-valued fields of the Partition thrift object that can be projected.
  private static final ImmutableSet<String> allPartitionMultiValuedFields = new ImmutableSet.Builder<String>()
      .add("values")
      .add("sd.cols.name")
      .add("sd.cols.type")
      .add("sd.cols.comment")
      .add("sd.serdeInfo.parameters")
      .add("sd.bucketCols")
      .add("sd.sortCols.col")
      .add("sd.sortCols.order")
      .add("sd.parameters")
      .add("sd.skewedInfo.skewedColNames")
      .add("sd.skewedInfo.skewedColValues")
      .add("sd.skewedInfo.skewedColValueLocationMaps")
      .add("parameters")
      .add("privileges.userPrivileges")
      .add("privileges.groupPrivileges")
      .add("privileges.rolePrivileges")
      .build();
  // Union of all valid projection field names; used for validation and expansion.
  private static final ImmutableSet<String> allPartitionFields = new ImmutableSet.Builder<String>()
      .addAll(allPartitionSingleValuedFields.keySet())
      .addAll(allPartitionMultiValuedFields)
      .build();
  /**
   * Creates an evaluator for the given projection specification.
   *
   * @param pm persistence manager used to issue direct SQL queries
   * @param fieldNameToTableName mapping from projection field names and table-name keys
   *        (e.g. "PARTITIONS_TABLE_NAME") to physical SQL column/table names
   * @param projectionFields requested partition fields in dot notation
   * @param convertMapNullsToEmptyStrings whether nulls in parameter maps become ""
   * @param isView whether the table is a view (sd.* fields are then skipped)
   * @param includeParamKeyPattern optional pattern of parameter keys to include
   * @param excludeParamKeyPattern optional pattern of parameter keys to exclude
   * @throws MetaException if any projection field name is invalid
   */
  public PartitionProjectionEvaluator(PersistenceManager pm,
      ImmutableMap<String, String> fieldNameToTableName, List<String> projectionFields,
      boolean convertMapNullsToEmptyStrings, boolean isView, String includeParamKeyPattern,
      String excludeParamKeyPattern) throws MetaException {
    this.pm = pm;
    this.fieldNameToTableName = fieldNameToTableName;
    this.convertMapNullsToEmptyStrings = convertMapNullsToEmptyStrings;
    this.isView = isView;
    this.includeParamKeyPattern = includeParamKeyPattern;
    this.excludeParamKeyPattern = excludeParamKeyPattern;
    // Fall back to the default quoted table names when the caller did not supply overrides.
    this.PARTITIONS =
        fieldNameToTableName.containsKey("PARTITIONS_TABLE_NAME") ? fieldNameToTableName
            .get("PARTITIONS_TABLE_NAME") : "\"PARTITIONS\"";
    this.SDS = fieldNameToTableName.containsKey("SDS_TABLE_NAME") ? fieldNameToTableName
        .get("SDS_TABLE_NAME") : "\"SDS\"";
    this.SERDES = fieldNameToTableName.containsKey("SERDES_TABLE_NAME") ? fieldNameToTableName
        .get("SERDES_TABLE_NAME") : "\"SERDES\"";
    this.PARTITION_PARAMS =
        fieldNameToTableName.containsKey("PARTITION_PARAMS") ? fieldNameToTableName
            .get("PARTITION_PARAMS") : "\"PARTITION_PARAMS\"";
    roots = parse(projectionFields);
    // we always query PART_ID
    roots.add(partIdNode);
    // Add the join-key id nodes only when the projection actually touches those tables.
    if (find(SD_PATTERN)) {
      roots.add(sdIdNode);
    }
    if (find(SERDE_PATTERN)) {
      roots.add(serdeIdNode);
    }
    if (find(CD_PATTERN)) {
      roots.add(cdIdNode);
    }
  }
  /**
   * Given a Java regex string pattern, checks if the partitionFieldNode tree
   * has any node whose fieldName matches the given pattern.
   * @param searchField regex to match against node field names
   * @return true if any node in any root's subtree matches the pattern
   */
  @VisibleForTesting
  boolean find(String searchField) {
    Pattern p = Pattern.compile(searchField);
    for (PartitionFieldNode node : roots) {
      if (find(node, p)) {
        return true;
      }
    }
    return false;
  }
  // Depth-first search for a node whose full field name matches the pattern.
  private static boolean find(PartitionFieldNode root, Pattern p) {
    if (root == null) {
      return false;
    }
    if (p.matcher(root.fieldName).matches()) {
      return true;
    }
    for (PartitionFieldNode child : root.children) {
      if (find(child, p)) {
        return true;
      }
    }
    return false;
  }
  /**
   * if top level field name is given expand the top level field such that all the children
   * of that node are added to the projection list. eg. if only "sd" is provided in the projection
   * list, it means all the nested fields for sd should be added to the projection fields
   * @param projectionList projection fields, possibly containing prefixes like "sd"
   * @return set of fully expanded field names
   * @throws MetaException if a name matches no known partition field
   */
  private static Set<String> expand(Collection<String> projectionList) throws MetaException {
    Set<String> result = new HashSet<>();
    for (String projectedField : projectionList) {
      if (allPartitionFields.contains(projectedField)) {
        result.add(projectedField);
      } else {
        // Treat the input as a prefix and pull in every known field under it.
        boolean found = false;
        for (String partitionField : allPartitionFields) {
          if (partitionField.startsWith(projectedField)) {
            LOG.debug("Found " + partitionField + " included within given projection field "
                + projectedField);
            result.add(partitionField);
            found = true;
          }
        }
        if (!found) {
          throw new MetaException("Invalid field name " + projectedField);
        }
      }
    }
    return result;
  }
  @VisibleForTesting
  Set<PartitionFieldNode> getRoots() {
    return roots;
  }
  // Throws if any requested field is not a known partition field.
  private static void validate(Collection<String> projectionFields) throws MetaException {
    Set<String> verify = new HashSet<>(projectionFields);
    verify.removeAll(allPartitionFields);
    if (verify.size() > 0) {
      throw new MetaException("Invalid partition fields in the projection spec" + Arrays
          .toString(verify.toArray(new String[verify.size()])));
    }
  }
  /**
   * Parses the requested projection fields into a tree of {@link PartitionFieldNode}s.
   * Fields served from the Table object (dbName, tableName, catName) are dropped, sd.*
   * fields are dropped for views, prefixes are expanded and everything is validated.
   * @param inputProjectionFields raw field names from the caller
   * @return the set of root nodes of the parsed field tree
   * @throws MetaException if validation of the field names fails
   */
  private Set<PartitionFieldNode> parse(List<String> inputProjectionFields) throws MetaException {
    // in case of dbName and tableName we rely on table object to get their values
    this.projectionFields = new HashSet<>(inputProjectionFields);
    projectionFields.remove("dbName");
    projectionFields.remove("tableName");
    projectionFields.remove("catName");
    if (isView) {
      // if this is a view SDs are not set so can be skipped
      projectionFields.removeIf(
          s -> s.matches(SD_PATTERN) || s.matches(SERDE_PATTERN) || s.matches(CD_PATTERN));
    }
    // remove redundant fields
    projectionFields = PartitionProjectionEvaluator.expand(projectionFields);
    removeUnsupportedFields();
    validate(projectionFields);
    // nestedNodes caches one node per distinct dotted prefix so shared prefixes
    // (e.g. "sd" for "sd.location" and "sd.inputFormat") map to the same node.
    Map<String, PartitionFieldNode> nestedNodes = new HashMap<>();
    Set<PartitionFieldNode> rootNodes = new HashSet<>();
    for (String projectedField : projectionFields) {
      String[] fields = projectedField.split("\\.");
      if (fields.length == 0) {
        LOG.warn("Invalid projected field {}. Ignoring ..", projectedField);
        continue;
      }
      StringBuilder fieldNameBuilder = new StringBuilder(fields[0]);
      PartitionFieldNode currentNode = createIfNotExists(nestedNodes, fieldNameBuilder.toString());
      rootNodes.add(currentNode);
      // Walk down the dotted path, creating/linking one node per level.
      for (int level = 1; level < fields.length; level++) {
        final String name = fieldNameBuilder.append(".").append(fields[level]).toString();
        PartitionFieldNode childNode = createIfNotExists(nestedNodes, name);
        // all the children of a multi-valued nodes are also multi-valued
        if (currentNode.isMultiValued) {
          childNode.setMultiValued();
        }
        currentNode.addChild(childNode);
        currentNode = childNode;
      }
    }
    return rootNodes;
  }
  // TODO some of the optional partition fields are never set by DirectSQL implementation
  // Removing such fields to keep it consistent with methods in MetastoreDirectSQL class
  private void removeUnsupportedFields() {
    List<String> unsupportedFields = Arrays
        .asList("sd.serdeInfo.serializerClass", "sd.serdeInfo.deserializerClass",
            "sd.serdeInfo.serdeType", "sd.serdeInfo.description");
    for (String unsupportedField : unsupportedFields) {
      if (projectionFields.contains(unsupportedField)) {
        LOG.warn("DirectSQL does not return partitions with the optional field" + unsupportedField
            + " set. Removing it from the projection list");
        projectionFields.remove(unsupportedField);
      }
    }
  }
  // Returns the cached node for fieldName, creating it (multi-valued if it has a
  // registered multi-valued setter) on first use.
  private PartitionFieldNode createIfNotExists(Map<String, PartitionFieldNode> nestedNodes,
      String fieldName) {
    PartitionFieldNode currentNode = nestedNodes.computeIfAbsent(fieldName, k -> {
      if (multiValuedFieldSetters.containsKey(fieldName)) {
        return new PartitionFieldNode(fieldName, true);
      } else {
        return new PartitionFieldNode(fieldName);
      }
    });
    return currentNode;
  }
  /**
   * Given a list of partition ids, returns the List of partially filled partitions based on the
   * projection list used to instantiate this PartitionProjectionEvaluator
   * @param partitionIds List of partition ids corresponding to the Partitions objects which are requested
   * @return Partitions where each partition has only the projected fields set
   * @throws MetaException if the underlying SQL queries fail
   */
  public List<Partition> getPartitionsUsingProjectionList(List<Long> partitionIds)
      throws MetaException {
    // id -> object maps shared between the two phases so that multi-valued setters can
    // attach values to the objects created while reading the single-valued columns.
    TreeMap<Long, StorageDescriptor> sds = new TreeMap<>();
    TreeMap<Long, List<FieldSchema>> cds = new TreeMap<>();
    TreeMap<Long, SerDeInfo> serdes = new TreeMap<>();
    TreeMap<Long, Partition> partitions = new TreeMap<>();
    List<Partition> results = setSingleValuedFields(partitionIds, partitions, sds, serdes, cds);
    setMultivaluedFields(partitions, sds, serdes, cds);
    return results;
  }
  /**
   * Phase 1: runs a single SQL query for all single-valued fields and instantiates one
   * Partition per row, registering SD/SerDe/CD objects in the given id maps for phase 2.
   * @return partitions in PART_NAME order (consistent with the JDO code path)
   */
  private List<Partition> setSingleValuedFields(List<Long> partitionIds,
      final TreeMap<Long, Partition> partitions, final TreeMap<Long, StorageDescriptor> sdIds,
      final TreeMap<Long, SerDeInfo> serdeIds, final TreeMap<Long, List<FieldSchema>> cdIds)
      throws MetaException {
    StringBuilder queryTextBuilder = new StringBuilder();
    int numColumns = buildQueryForSingleValuedFields(partitionIds, queryTextBuilder);
    String queryText = queryTextBuilder.toString();
    Query<?> query = pm.newQuery("javax.jdo.query.SQL", queryText);
    try {
      long start = LOG.isDebugEnabled() ? System.nanoTime() : 0;
      List<Object> sqlResult = MetastoreDirectSqlUtils.executeWithArray(query, null, queryText);
      long queryTime = LOG.isDebugEnabled() ? System.nanoTime() : 0;
      MetastoreDirectSqlUtils.timingTrace(LOG.isDebugEnabled(), queryText, start, queryTime);
      Deadline.checkTimeout();
      // Scratch slots for the ids of the current row; see SD_INDEX/CD_INDEX/... constants.
      final Long[] ids = new Long[4];
      Object[] rowVals = new Object[1];
      // Keep order by name, consistent with JDO.
      ArrayList<Partition> orderedResult = new ArrayList<Partition>(partitionIds.size());
      for (Object row : sqlResult) {
        if (numColumns > 1) {
          rowVals = (Object[])row;
        } else {
          // only one column is selected by query. The result class will be Object
          rowVals[0] = row;
        }
        Partition part = new Partition();
        for (PartitionFieldNode root : roots) {
          traverseAndSetValues(part, root, rowVals, new PartitionFieldValueSetter() {
            @Override
            public void setValue(Object partition, PartitionFieldNode node, Object value)
                throws MetaException {
              if (!node.isMultiValued) {
                // in case of serdeid and sdId node we just collect the sdIds for further processing
                if (node.equals(sdIdNode)) {
                  ids[SD_INDEX] = extractSqlLong(value);
                } else if (node.equals(serdeIdNode)) {
                  ids[SERDE_INDEX] = extractSqlLong(value);
                } else if (node.equals(cdIdNode)) {
                  ids[CD_INDEX] = extractSqlLong(value);
                } else if (node.equals(partIdNode)) {
                  ids[PART_INDEX] = extractSqlLong(value);
                } else {
                  // in case of sd.compressed and sd.storedAsSubDirectories we need special code to convert
                  // string to a boolean value
                  if (node.fieldName.equals("sd.compressed") || node.fieldName.equals("sd.storedAsSubDirectories")) {
                    value = MetastoreDirectSqlUtils.extractSqlBoolean(value);
                  } else if (node.fieldName.equals("lastAccessTime") || node.fieldName.equals("createTime")) {
                    value = MetastoreDirectSqlUtils.extractSqlInt(value);
                  }
                  MetaStoreServerUtils.setNestedProperty(partition, node.fieldName, value, true);
                }
              }
            }
          });
        }
        // PART_ID is always queried
        if (ids[PART_INDEX] == null) {
          throw new MetaException("Could not find PART_ID for partition " + part);
        }
        partitions.put(ids[PART_INDEX], part);
        orderedResult.add(part);
        ids[PART_INDEX] = null;
        if (ids[SD_INDEX] != null) {
          // sd object is initialized if any of the sd single-valued fields are in the projection
          if (part.getSd() == null) {
            part.setSd(new StorageDescriptor());
          }
          sdIds.put(ids[SD_INDEX], part.getSd());
          ids[SD_INDEX] = null;
        }
        if (ids[SERDE_INDEX] != null) {
          // serde object must have already been initialized above in MetaStoreUtils.setNestedProperty call
          if (part.getSd().getSerdeInfo() == null) {
            part.getSd().setSerdeInfo(new SerDeInfo());
          }
          serdeIds.put(ids[SERDE_INDEX], part.getSd().getSerdeInfo());
          ids[SERDE_INDEX] = null;
        }
        if (ids[CD_INDEX] != null) {
          // common case is all the SDs will reuse the same CD
          // allocate List<FieldSchema> only when you see a new CD_ID
          cdIds.putIfAbsent(ids[CD_INDEX], new ArrayList<>(5));
          if (part.getSd().getCols() == null) {
            part.getSd().setCols(cdIds.get(ids[CD_INDEX]));
          }
          ids[CD_INDEX] = null;
        }
        Deadline.checkTimeout();
      }
      return orderedResult;
    } catch (Exception e) {
      LOG.error("Exception received while getting partitions using projected fields", e);
      throw new MetaException(e.getMessage());
    } finally {
      query.closeAll();
    }
  }
  // Phase 2: populates all requested multi-valued fields via their registered setters.
  private void setMultivaluedFields(TreeMap<Long, Partition> partitions,
      TreeMap<Long, StorageDescriptor> sds, TreeMap<Long, SerDeInfo> serdes,
      TreeMap<Long, List<FieldSchema>> cds) throws MetaException {
    for (PartitionFieldNode root : roots) {
      traverseAndSetMultiValuedFields(root, partitions, sds, serdes, cds);
    }
  }
  private void traverseAndSetMultiValuedFields(PartitionFieldNode root,
      TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
      TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds) throws MetaException {
    if (root == null) {
      return;
    }
    // if a multi-valued node is found set its value using its value-setters
    // note that once a multi-valued node is found the method does not recurse further
    // this is because the multi-valued setter also sets the values of all its descendents
    if (root.isMultiValued) {
      MutivaluedFieldSetter multiValuedFieldSetter = multiValuedFieldSetters.get(root.fieldName);
      if (multiValuedFieldSetter == null) {
        throw new MetaException(
            "Cannot find multi-valued field setter for field " + root.fieldName);
      }
      multiValuedFieldSetter.setValue(root, partitions, sds, serdes, cds);
    } else {
      for (PartitionFieldNode child : root.children) {
        traverseAndSetMultiValuedFields(child, partitions, sds, serdes, cds);
      }
    }
  }
  // Recursively visits single-valued leaf nodes and hands each leaf's column value
  // (row[fieldIndex]) to the given setter.
  private void traverseAndSetValues(Partition part, PartitionFieldNode root, Object[] row,
      PartitionFieldValueSetter valueSetter) throws MetaException {
    // if root is null or is multiValued, do not recurse further
    // multi-valued fields are set separately in setMultiValuedFields method
    if (root == null || root.isMultiValued()) {
      return;
    }
    if (root.isLeafNode()) {
      valueSetter.setValue(part, root, row[root.fieldIndex]);
      return;
    }
    for (PartitionFieldNode child : root.children) {
      traverseAndSetValues(part, child, row, valueSetter);
    }
  }
  private static final String SPACE = " ";
  /**
   * Builds the direct-SQL SELECT for all single-valued columns, adding SDS/SERDES
   * joins only when needed, and appends the PART_ID filter and PART_NAME ordering.
   * @return the number of selected columns (callers use it to interpret result rows)
   */
  private int buildQueryForSingleValuedFields(List<Long> partitionIds, StringBuilder queryTextBuilder) {
    queryTextBuilder.append("select ");
    // build projection columns using the ProjectedFields
    // it should not matter if you select all the
    List<String> columnList = getSingleValuedColumnNames(roots);
    queryTextBuilder.append(Joiner.on(',').join(columnList));
    queryTextBuilder.append(SPACE);
    queryTextBuilder.append("from " + PARTITIONS);
    // if SD fields are selected add join clause with SDS
    boolean foundSD = false;
    if (find(SD_PATTERN)) {
      queryTextBuilder.append(SPACE);
      queryTextBuilder.append(
          "left outer join " + SDS + " on " + PARTITIONS + ".\"SD_ID\" = " + SDS + ".\"SD_ID\"");
      foundSD = true;
    }
    // if serde fields are selected add join clause on serdes
    if (foundSD || find(SERDE_PATTERN)) {
      queryTextBuilder.append(SPACE);
      queryTextBuilder.append(
          " left outer join " + SERDES + " on " + SDS + ".\"SERDE_ID\" = " + SERDES
              + ".\"SERDE_ID\"");
    }
    queryTextBuilder.append(SPACE);
    //add where clause
    queryTextBuilder.append("where \"PART_ID\" in (" + Joiner.on(',').join(partitionIds)
        + ") order by \"PART_NAME\" asc");
    return columnList.size();
  }
  // Collects the SQL column name for every single-valued leaf under root and assigns
  // each leaf its index in the result row; returns the next free index.
  private int getSingleValuedColumnName(PartitionFieldNode root, int fieldId,
      final List<String> columnNames) {
    if (root == null) {
      return fieldId;
    }
    if (root.isLeafNode() && !root.isMultiValued) {
      if (fieldNameToTableName.containsKey(root.fieldName)) {
        columnNames.add(fieldNameToTableName.get(root.fieldName));
        root.setFieldIndex(fieldId++);
        return fieldId;
      }
      throw new RuntimeException(
          "No column name mapping found for partition field " + root.fieldName);
    }
    for (PartitionFieldNode child : root.children) {
      fieldId = getSingleValuedColumnName(child, fieldId, columnNames);
    }
    return fieldId;
  }
  // Column names for all single-valued leaves across all roots, in traversal order.
  private List<String> getSingleValuedColumnNames(Set<PartitionFieldNode> roots) {
    List<String> columnNames = new ArrayList<>();
    int fieldIndex = 0;
    for (PartitionFieldNode node : roots) {
      fieldIndex = getSingleValuedColumnName(node, fieldIndex, columnNames);
    }
    return columnNames;
  }
  // Recursively flattens a JSON structure into dotted field names, collecting each
  // leaf path (array elements share their parent's name) into results.
  private static void getNestedFieldName(JsonNode jsonNode, String fieldName,
      Collection<String> results) {
    if (jsonNode instanceof ArrayNode) {
      Iterator<JsonNode> elements = ((ArrayNode) jsonNode).elements();
      if (!elements.hasNext()) {
        results.add(fieldName);
        return;
      }
      while (elements.hasNext()) {
        JsonNode element = elements.next();
        getNestedFieldName(element, fieldName, results);
      }
    } else {
      Iterator<Entry<String, JsonNode>> fields = jsonNode.fields();
      if (!fields.hasNext()) {
        results.add(fieldName);
        return;
      }
      while (fields.hasNext()) {
        Entry<String, JsonNode> fieldKV = fields.next();
        String key = fieldKV.getKey();
        getNestedFieldName(fieldKV.getValue(), fieldName.length() == 0 ? key : fieldName + "." + key,
            results);
      }
    }
  }
  /**
   * A node in the projection-field tree. Each node carries the full dotted field name
   * (e.g. "sd.serdeInfo.name"); equality and hashing are based on that name only.
   */
  static class PartitionFieldNode {
    private String fieldName;
    private Set<PartitionFieldNode> children = new HashSet<>(4);
    private boolean isMultiValued;
    // Index of this leaf's column in the SQL result row (set by getSingleValuedColumnName).
    private int fieldIndex;
    PartitionFieldNode(String fieldName) {
      this.fieldName = fieldName;
      isMultiValued = false;
    }
    PartitionFieldNode(String fieldName, boolean isMultiValued) {
      this.fieldName = fieldName;
      this.isMultiValued = isMultiValued;
    }
    @Override
    public boolean equals(Object o) {
      if (this == o)
        return true;
      if (o == null || getClass() != o.getClass())
        return false;
      PartitionFieldNode that = (PartitionFieldNode) o;
      return Objects.equals(fieldName, that.fieldName);
    }
    boolean isLeafNode() {
      return children == null || children.isEmpty();
    }
    void setFieldIndex(int fieldIndex) {
      this.fieldIndex = fieldIndex;
    }
    @VisibleForTesting
    void addChild(PartitionFieldNode child) {
      children.add(child);
    }
    @VisibleForTesting
    String getFieldName() {
      return fieldName;
    }
    @VisibleForTesting
    Set<PartitionFieldNode> getChildren() {
      // defensive copy so callers cannot mutate the tree
      return new HashSet<>(children);
    }
    @VisibleForTesting
    boolean isMultiValued() {
      return isMultiValued;
    }
    @Override
    public String toString() {
      return fieldName;
    }
    @Override
    public int hashCode() {
      return Objects.hash(fieldName);
    }
    void setMultiValued() {
      this.isMultiValued = true;
    }
  }
  /**
   * Strategy interface for populating one multi-valued field for a batch of partitions.
   * Implementations issue their own SQL against the relevant side table.
   */
  private interface MutivaluedFieldSetter {
    void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds) throws MetaException;
  }
  /** Populates Partition.values from PARTITION_KEY_VALS. */
  private class PartitionValuesSetter implements MutivaluedFieldSetter {
    private PartitionValuesSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      final String tableName =
          fieldNameToTableName.containsKey("PARTITION_KEY_VALS") ? fieldNameToTableName
              .get("PARTITION_KEY_VALS") : "\"PARTITION_KEY_VALS\"";
      MetastoreDirectSqlUtils
          .setPartitionValues(tableName, pm, Joiner.on(',').join(partitions.keySet()), partitions);
    }
  }
  /** Populates Partition.parameters, applying the include/exclude key patterns. */
  private class PartitionParametersSetter implements MutivaluedFieldSetter {
    private PartitionParametersSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      MetastoreDirectSqlUtils
          .setPartitionParametersWithFilter(PARTITION_PARAMS, convertMapNullsToEmptyStrings, pm,
              Joiner.on(',').join(partitions.keySet()), partitions, includeParamKeyPattern,
              excludeParamKeyPattern);
    }
  }
  /** Populates sd.cols (only the requested FieldSchema sub-fields) from COLUMNS_V2. */
  private class PartitionSDColsSetter implements MutivaluedFieldSetter {
    private PartitionSDColsSetter() {
      // prevent instantiation
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      // find the fields which are requested for sd.cols
      // children field names would be sd.cols.name, sd.cols.type or sd.cols.description
      List<String> childFields = getChildrenFieldNames(root);
      final String tableName = fieldNameToTableName.containsKey("COLUMNS_V2") ? fieldNameToTableName
          .get("COLUMNS_V2") : "\"COLUMNS_V2\"";
      MetastoreDirectSqlUtils
          .setSDCols(tableName, childFields, pm, cds, Joiner.on(',').join(cds.keySet()));
    }
  }
  /** Populates sd.bucketCols from BUCKETING_COLS. */
  private class PartitionSDBucketColsSetter implements MutivaluedFieldSetter {
    private PartitionSDBucketColsSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      final String tableName =
          fieldNameToTableName.containsKey("BUCKETING_COLS") ? fieldNameToTableName
              .get("BUCKETING_COLS") : "\"BUCKETING_COLS\"";
      MetastoreDirectSqlUtils
          .setSDBucketCols(tableName, pm, sds, Joiner.on(',').join(sds.keySet()));
    }
  }
  /** Populates sd.sortCols (only the requested Order sub-fields) from SORT_COLS. */
  private class PartitionSortColsSetter implements MutivaluedFieldSetter {
    private PartitionSortColsSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      List<String> childFieldNames = getChildrenFieldNames(root);
      final String tableName = fieldNameToTableName.containsKey("SORT_COLS") ? fieldNameToTableName
          .get("SORT_COLS") : "\"SORT_COLS\"";
      MetastoreDirectSqlUtils
          .setSDSortCols(tableName, childFieldNames, pm, sds, Joiner.on(',').join(sds.keySet()));
    }
  }
  // Extracts the last path segment of each child's dotted name
  // (e.g. "sd.cols.name" -> "name").
  private List<String> getChildrenFieldNames(PartitionFieldNode root) throws MetaException {
    List<String> childFields = new ArrayList<>(3);
    for (PartitionFieldNode child : root.getChildren()) {
      if (child.getFieldName().lastIndexOf(".") < 0) {
        throw new MetaException("Error parsing multi-valued field name " + child.getFieldName());
      }
      childFields.add(child.getFieldName().substring(child.getFieldName().lastIndexOf(".") + 1));
    }
    return childFields;
  }
  /** Populates sd.parameters from SD_PARAMS. */
  private class PartitionSDParametersSetter implements MutivaluedFieldSetter {
    private PartitionSDParametersSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      final String tableName = fieldNameToTableName.containsKey("SD_PARAMS") ? fieldNameToTableName
          .get("SD_PARAMS") : "\"SD_PARAMS\"";
      MetastoreDirectSqlUtils.setSDParameters(tableName, convertMapNullsToEmptyStrings, pm, sds,
          Joiner.on(',').join(sds.keySet()));
    }
  }
  /** Populates sd.serdeInfo.parameters from SERDE_PARAMS. */
  private class PartitionSerdeInfoParametersSetter implements MutivaluedFieldSetter {
    private PartitionSerdeInfoParametersSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      final String tableName =
          fieldNameToTableName.containsKey("SERDE_PARAMS") ? fieldNameToTableName
              .get("SERDE_PARAMS") : "\"SERDE_PARAMS\"";
      MetastoreDirectSqlUtils.setSerdeParams(tableName, convertMapNullsToEmptyStrings, pm, serdes,
          Joiner.on(',').join(serdes.keySet()));
    }
  }
  /** Populates sd.skewedInfo.skewedColNames from SKEWED_COL_NAMES. */
  private class PartitionSkewedColsNamesSetter implements MutivaluedFieldSetter {
    private PartitionSkewedColsNamesSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      final String tableName =
          fieldNameToTableName.containsKey("SKEWED_COL_NAMES") ? fieldNameToTableName
              .get("SKEWED_COL_NAMES") : "\"SKEWED_COL_NAMES\"";
      MetastoreDirectSqlUtils
          .setSkewedColNames(tableName, pm, sds, Joiner.on(',').join(sds.keySet()));
    }
  }
  /** Populates sd.skewedInfo.skewedColValues from SKEWED_VALUES/SKEWED_STRING_LIST_VALUES. */
  private class PartitionSkewedColsValuesSetter implements MutivaluedFieldSetter {
    private PartitionSkewedColsValuesSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      final String skewedStringListVals =
          fieldNameToTableName.containsKey("SKEWED_STRING_LIST_VALUES") ? fieldNameToTableName
              .get("SKEWED_STRING_LIST_VALUES") : "\"SKEWED_STRING_LIST_VALUES\"";
      final String skewedVals =
          fieldNameToTableName.containsKey("SKEWED_VALUES") ? fieldNameToTableName
              .get("SKEWED_VALUES") : "\"SKEWED_VALUES\"";
      MetastoreDirectSqlUtils.setSkewedColValues(skewedStringListVals, skewedVals, pm, sds,
          Joiner.on(',').join(sds.keySet()));
    }
  }
  /** Populates sd.skewedInfo.skewedColValueLocationMaps from SKEWED_COL_VALUE_LOC_MAP. */
  private class PartitionSkewedColValLocationMapSetter implements MutivaluedFieldSetter {
    private PartitionSkewedColValLocationMapSetter() {
      //
    }
    @Override
    public void setValue(PartitionFieldNode root, TreeMap<Long, Partition> partitions, TreeMap<Long, StorageDescriptor> sds,
        TreeMap<Long, SerDeInfo> serdes, TreeMap<Long, List<FieldSchema>> cds)
        throws MetaException {
      final String skewedStringListVals =
          fieldNameToTableName.containsKey("SKEWED_STRING_LIST_VALUES") ? fieldNameToTableName
              .get("SKEWED_STRING_LIST_VALUES") : "\"SKEWED_STRING_LIST_VALUES\"";
      final String skewedColValLocMap =
          fieldNameToTableName.containsKey("SKEWED_COL_VALUE_LOC_MAP") ? fieldNameToTableName
              .get("SKEWED_COL_VALUE_LOC_MAP") : "\"SKEWED_COL_VALUE_LOC_MAP\"";
      MetastoreDirectSqlUtils
          .setSkewedColLocationMaps(skewedColValLocMap, skewedStringListVals, pm, sds,
              Joiner.on(',').join(sds.keySet()));
    }
  }
  /**
   * Given a list of partition fields, checks if all the fields requested are single-valued. If all
   * the fields are single-valued returns list of equivalent MPartition fieldnames
   * which can be used in the setResult clause of a JDO query
   *
   * @param partitionFields List of partitionFields in the projection
   * @return List of JDO field names which can be used in setResult clause
   * of a JDO query. Returns null if input partitionFields cannot be used in a setResult clause
   * @throws MetaException if any of the given field names is invalid
   */
  public static List<String> getMPartitionFieldNames(List<String> partitionFields)
      throws MetaException {
    // if there are no partitionFields requested, it means all the fields are requested which include
    // multi-valued fields.
    if (partitionFields == null || partitionFields.isEmpty()) {
      return null;
    }
    // throw exception if there are invalid field names
    PartitionProjectionEvaluator.validate(partitionFields);
    // else, check if all the fields are single-valued. In case there are multi-valued fields requested
    // return null since setResult in JDO doesn't support multi-valued fields
    if (!allPartitionSingleValuedFields.keySet().containsAll(partitionFields)) {
      return null;
    }
    List<String> jdoFields = new ArrayList<>(partitionFields.size());
    for (String partitionField : partitionFields) {
      jdoFields.add(allPartitionSingleValuedFields.get(partitionField));
    }
    return jdoFields;
  }
}
|
google/guava | 36,456 | guava-tests/test/com/google/common/net/InetAddressesTest.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.net;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import com.google.common.collect.ImmutableSet;
import com.google.common.testing.NullPointerTester;
import java.math.BigInteger;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Enumeration;
import junit.framework.TestCase;
import org.jspecify.annotations.NullUnmarked;
/**
 * Tests for {@link InetAddresses}.
 *
 * <p>Covers parsing and formatting of IPv4/IPv6 literals (including non-ASCII digits and
 * URI-bracketed forms), scope IDs, embedded-IPv4 forms (compat, mapped, 6to4, Teredo, ISATAP),
 * address coercion, increment/decrement arithmetic, and BigInteger conversions. Several tests
 * tolerate Android-specific strictness by returning early or skipping named interfaces.
 *
 * @author Erik Kline
 */
@NullUnmarked
public class InetAddressesTest extends TestCase {
  // All public static methods must reject null arguments.
  public void testNulls() {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicStaticMethods(InetAddresses.class);
  }
  // Strings that must be rejected by both forString (throws) and isInetAddress (false).
  // Note: leading zeros, hex/octal quads, and partial quads are all invalid by design.
  public void testForStringBogusInput() {
    ImmutableSet<String> bogusInputs =
        ImmutableSet.of(
            "",
            "016.016.016.016",
            "016.016.016",
            "016.016",
            "016",
            "000.000.000.000",
            "000",
            "0x0a.0x0a.0x0a.0x0a",
            "0x0a.0x0a.0x0a",
            "0x0a.0x0a",
            "0x0a",
            "42.42.42.42.42",
            "42.42.42",
            "42.42",
            "42",
            "42..42.42",
            "42..42.42.42",
            "42.42.42.42.",
            "42.42.42.42...",
            ".42.42.42.42",
            ".42.42.42",
            "...42.42.42.42",
            "42.42.42.-0",
            "42.42.42.+0",
            ".",
            "...",
            "bogus",
            "bogus.com",
            "192.168.0.1.com",
            "12345.67899.-54321.-98765",
            "257.0.0.0",
            "42.42.42.-42",
            "42.42.42.ab",
            "3ffe::1.net",
            "3ffe::1::1",
            "1::2::3::4:5",
            "::7:6:5:4:3:2:", // should end with ":0"
            ":6:5:4:3:2:1::", // should begin with "0:"
            "2001::db:::1",
            "FEDC:9878",
            "+1.+2.+3.4",
            "1.2.3.4e0",
            "6:5:4:3:2:1:0", // too few parts
            "::7:6:5:4:3:2:1:0", // too many parts
            "7:6:5:4:3:2:1:0::", // too many parts
            "9:8:7:6:5:4:3::2:1", // too many parts
            "0:1:2:3::4:5:6:7", // :: must remove at least one 0.
            "3ffe:0:0:0:0:0:0:0:1", // too many parts (9 instead of 8)
            "3ffe::10000", // hextet exceeds 16 bits
            "3ffe::goog",
            "3ffe::-0",
            "3ffe::+0",
            "3ffe::-1",
            ":",
            ":::",
            "::1.2.3",
            "::1.2.3.4.5",
            "::1.2.3.4:",
            "1.2.3.4::",
            "2001:db8::1:",
            ":2001:db8::1",
            ":1:2:3:4:5:6:7",
            "1:2:3:4:5:6:7:",
            ":1:2:3:4:5:6:");
    for (String bogusInput : bogusInputs) {
      assertThrows(
          "IllegalArgumentException expected for '" + bogusInput + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.forString(bogusInput));
      assertFalse(InetAddresses.isInetAddress(bogusInput));
    }
  }
  // Regression-style spot checks for two specific malformed inputs.
  public void test3ff31() {
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forString("3ffe:::1"));
    assertFalse(InetAddresses.isInetAddress("016.016.016.016"));
  }
  public void testForStringIPv4Input() throws UnknownHostException {
    String ipStr = "192.168.0.1";
    // Shouldn't hit DNS, because it's an IP string literal.
    InetAddress ipv4Addr = InetAddress.getByName(ipStr);
    assertEquals(ipv4Addr, InetAddresses.forString(ipStr));
    assertTrue(InetAddresses.isInetAddress(ipStr));
  }
  public void testForStringIPv4NonAsciiInput() throws UnknownHostException {
    String ipStr = "૧૯૨.૧૬૮.૦.૧"; // 192.168.0.1 in Gujarati digits
    // Shouldn't hit DNS, because it's an IP string literal.
    InetAddress ipv4Addr;
    try {
      ipv4Addr = InetAddress.getByName(ipStr);
    } catch (UnknownHostException e) {
      // OK: this is probably Android, which is stricter.
      return;
    }
    assertEquals(ipv4Addr, InetAddresses.forString(ipStr));
    assertTrue(InetAddresses.isInetAddress(ipStr));
  }
  public void testForStringIPv6Input() throws UnknownHostException {
    String ipStr = "3ffe::1";
    // Shouldn't hit DNS, because it's an IP string literal.
    InetAddress ipv6Addr = InetAddress.getByName(ipStr);
    assertEquals(ipv6Addr, InetAddresses.forString(ipStr));
    assertTrue(InetAddresses.isInetAddress(ipStr));
  }
  public void testForStringIPv6NonAsciiInput() throws UnknownHostException {
    String ipStr = "૩ffe::૧"; // 3ffe::1 with Gujarati digits for 3 and 1
    // Shouldn't hit DNS, because it's an IP string literal.
    InetAddress ipv6Addr;
    try {
      ipv6Addr = InetAddress.getByName(ipStr);
    } catch (UnknownHostException e) {
      // OK: this is probably Android, which is stricter.
      return;
    }
    assertEquals(ipv6Addr, InetAddresses.forString(ipStr));
    assertTrue(InetAddresses.isInetAddress(ipStr));
  }
  // Addresses written with the maximum eight colon-separated groups (leading/trailing "::").
  public void testForStringIPv6EightColons() throws UnknownHostException {
    ImmutableSet<String> eightColons =
        ImmutableSet.of("::7:6:5:4:3:2:1", "::7:6:5:4:3:2:0", "7:6:5:4:3:2:1::", "0:6:5:4:3:2:1::");
    for (String ipString : eightColons) {
      // Shouldn't hit DNS, because it's an IP string literal.
      InetAddress ipv6Addr = InetAddress.getByName(ipString);
      assertEquals(ipv6Addr, InetAddresses.forString(ipString));
      assertTrue(InetAddresses.isInetAddress(ipString));
    }
  }
  // IPv6 literals with a trailing dotted-quad must parse the same as the JDK's parser.
  public void testConvertDottedQuadToHex() throws UnknownHostException {
    ImmutableSet<String> ipStrings =
        ImmutableSet.of("7::0.128.0.127", "7::0.128.0.128", "7::128.128.0.127", "7::0.128.128.127");
    for (String ipString : ipStrings) {
      // Shouldn't hit DNS, because it's an IP string literal.
      InetAddress ipv6Addr = InetAddress.getByName(ipString);
      assertEquals(ipv6Addr, InetAddresses.forString(ipString));
      assertTrue(InetAddresses.isInetAddress(ipString));
    }
  }
  // Scope IDs are an IPv6-only concept; IPv4 literals with "%scope" must be rejected.
  public void testIPv4AddressWithScopeId() throws SocketException {
    ImmutableSet<String> ipStrings = ImmutableSet.of("1.2.3.4", "192.168.0.1");
    for (String ipString : ipStrings) {
      for (String scopeId : getMachineScopesAndInterfaces()) {
        String withScopeId = ipString + "%" + scopeId;
        assertFalse(
            "InetAddresses.isInetAddress(" + withScopeId + ") should be false but was true",
            InetAddresses.isInetAddress(withScopeId));
      }
    }
  }
  // Dotted-quad-suffixed IPv6 literals with a scope ID are likewise rejected.
  public void testDottedQuadAddressWithScopeId() throws SocketException {
    ImmutableSet<String> ipStrings =
        ImmutableSet.of("7::0.128.0.127", "7::0.128.0.128", "7::128.128.0.127", "7::0.128.128.127");
    for (String ipString : ipStrings) {
      for (String scopeId : getMachineScopesAndInterfaces()) {
        String withScopeId = ipString + "%" + scopeId;
        assertFalse(
            "InetAddresses.isInetAddress(" + withScopeId + ") should be false but was true",
            InetAddresses.isInetAddress(withScopeId));
      }
    }
  }
  // Parses IPv6 literals with both numeric and named scope IDs drawn from this machine's
  // interfaces; round-trips through toAddrString and verifies the scope survives.
  // NOTE(review): inherently environment-dependent — relies on the host having at least one
  // network interface, and skips named-interface forms on platforms (Android) that reject them.
  public void testIPv6AddressWithScopeId() throws SocketException, UnknownHostException {
    ImmutableSet<String> ipStrings =
        ImmutableSet.of(
            "::1",
            "1180::a",
            "1180::1",
            "1180::2",
            "1180::42",
            "1180::3dd0:7f8e:57b7:34d5",
            "1180::71a3:2b00:ddd3:753f",
            "1180::8b2:d61e:e5c:b333",
            "1180::b059:65f4:e877:c40",
            "fe80::34",
            "fec0::34");
    boolean processedNamedInterface = false;
    for (String ipString : ipStrings) {
      for (String scopeId : getMachineScopesAndInterfaces()) {
        String withScopeId = ipString + "%" + scopeId;
        assertTrue(
            "InetAddresses.isInetAddress(" + withScopeId + ") should be true but was false",
            InetAddresses.isInetAddress(withScopeId));
        Inet6Address parsed;
        boolean isNumeric = scopeId.matches("\\d+");
        try {
          parsed = (Inet6Address) InetAddresses.forString(withScopeId);
        } catch (IllegalArgumentException e) {
          if (!isNumeric) {
            // Android doesn't recognize %interface as valid
            continue;
          }
          throw e;
        }
        processedNamedInterface |= !isNumeric;
        assertThat(InetAddresses.toAddrString(parsed)).contains("%");
        if (isNumeric) {
          assertEquals(Integer.parseInt(scopeId), parsed.getScopeId());
        } else {
          assertEquals(scopeId, parsed.getScopedInterface().getName());
        }
        Inet6Address reparsed =
            (Inet6Address) InetAddresses.forString(InetAddresses.toAddrString(parsed));
        assertEquals(reparsed, parsed);
        assertEquals(reparsed.getScopeId(), parsed.getScopeId());
      }
    }
    // At least one named (non-numeric) interface must have been exercised.
    assertTrue(processedNamedInterface);
  }
  // Same scoped inputs as above, but checks that Guava's parse agrees with the platform's
  // InetAddress.getByName for every scope the platform itself accepts.
  public void testIPv6AddressWithScopeId_platformEquivalence()
      throws SocketException, UnknownHostException {
    ImmutableSet<String> ipStrings =
        ImmutableSet.of(
            "::1",
            "1180::a",
            "1180::1",
            "1180::2",
            "1180::42",
            "1180::3dd0:7f8e:57b7:34d5",
            "1180::71a3:2b00:ddd3:753f",
            "1180::8b2:d61e:e5c:b333",
            "1180::b059:65f4:e877:c40",
            "fe80::34",
            "fec0::34");
    for (String ipString : ipStrings) {
      for (String scopeId : getMachineScopesAndInterfaces()) {
        String withScopeId = ipString + "%" + scopeId;
        assertTrue(
            "InetAddresses.isInetAddress(" + withScopeId + ") should be true but was false",
            InetAddresses.isInetAddress(withScopeId));
        Inet6Address parsed;
        boolean isNumeric = scopeId.matches("\\d+");
        try {
          parsed = (Inet6Address) InetAddresses.forString(withScopeId);
        } catch (IllegalArgumentException e) {
          if (!isNumeric) {
            // Android doesn't recognize %interface as valid
            continue;
          }
          throw e;
        }
        Inet6Address platformValue;
        try {
          platformValue = (Inet6Address) InetAddress.getByName(withScopeId);
        } catch (UnknownHostException e) {
          // Android doesn't recognize %interface as valid
          if (!isNumeric) {
            continue;
          }
          throw e;
        }
        assertEquals(platformValue, parsed);
        assertEquals(platformValue.getScopeId(), parsed.getScopeId());
      }
    }
  }
  // A scope naming a nonexistent interface ("eth9") must be rejected.
  public void testIPv6AddressWithBadScopeId() throws SocketException, UnknownHostException {
    assertThrows(
        IllegalArgumentException.class,
        () -> InetAddresses.forString("1180::b059:65f4:e877:c40%eth9"));
  }
  public void testToAddrStringIPv4() {
    // Don't need to test IPv4 much; it just calls getHostAddress().
    assertEquals("1.2.3.4", InetAddresses.toAddrString(InetAddresses.forString("1.2.3.4")));
  }
  // Exercises RFC 5952-style canonical formatting: the longest zero run (and only runs of
  // two or more groups) collapses to "::", lowercase hex, no leading zeros.
  public void testToAddrStringIPv6() {
    assertEquals(
        "1:2:3:4:5:6:7:8", InetAddresses.toAddrString(InetAddresses.forString("1:2:3:4:5:6:7:8")));
    assertEquals(
        "2001:0:0:4::8", InetAddresses.toAddrString(InetAddresses.forString("2001:0:0:4:0:0:0:8")));
    assertEquals(
        "2001::4:5:6:7:8",
        InetAddresses.toAddrString(InetAddresses.forString("2001:0:0:4:5:6:7:8")));
    assertEquals(
        "2001:0:3:4:5:6:7:8",
        InetAddresses.toAddrString(InetAddresses.forString("2001:0:3:4:5:6:7:8")));
    assertEquals(
        "0:0:3::ffff", InetAddresses.toAddrString(InetAddresses.forString("0:0:3:0:0:0:0:ffff")));
    assertEquals(
        "::4:0:0:0:ffff",
        InetAddresses.toAddrString(InetAddresses.forString("0:0:0:4:0:0:0:ffff")));
    assertEquals(
        "::5:0:0:ffff", InetAddresses.toAddrString(InetAddresses.forString("0:0:0:0:5:0:0:ffff")));
    assertEquals(
        "1::4:0:0:7:8", InetAddresses.toAddrString(InetAddresses.forString("1:0:0:4:0:0:7:8")));
    assertEquals("::", InetAddresses.toAddrString(InetAddresses.forString("0:0:0:0:0:0:0:0")));
    assertEquals("::1", InetAddresses.toAddrString(InetAddresses.forString("0:0:0:0:0:0:0:1")));
    assertEquals(
        "2001:658:22a:cafe::",
        InetAddresses.toAddrString(InetAddresses.forString("2001:0658:022a:cafe::")));
    assertEquals("::102:304", InetAddresses.toAddrString(InetAddresses.forString("::1.2.3.4")));
  }
  public void testToUriStringIPv4() {
    String ipStr = "1.2.3.4";
    InetAddress ip = InetAddresses.forString(ipStr);
    assertEquals("1.2.3.4", InetAddresses.toUriString(ip));
  }
  public void testToUriStringIPv6() {
    // Unfortunately the InetAddress.toString() method for IPv6 addresses
    // does not collapse contiguous shorts of zeroes with the :: abbreviation.
    String ipStr = "3ffe::1";
    InetAddress ip = InetAddresses.forString(ipStr);
    assertEquals("[3ffe::1]", InetAddresses.toUriString(ip));
  }
  public void testForUriStringIPv4() {
    Inet4Address expected = (Inet4Address) InetAddresses.forString("192.168.1.1");
    assertEquals(expected, InetAddresses.forUriString("192.168.1.1"));
  }
  public void testForUriStringIPv6() {
    Inet6Address expected = (Inet6Address) InetAddresses.forString("3ffe:0:0:0:0:0:0:1");
    assertEquals(expected, InetAddresses.forUriString("[3ffe:0:0:0:0:0:0:1]"));
  }
  // An IPv4-mapped URI literal unwraps to the plain Inet4Address.
  public void testForUriStringIPv4Mapped() {
    Inet4Address expected = (Inet4Address) InetAddresses.forString("192.0.2.1");
    assertEquals(expected, InetAddresses.forUriString("[::ffff:192.0.2.1]"));
  }
  // URI form: IPv4 bare, IPv6 bracketed; every other bracket/bareness combination is invalid.
  public void testIsUriInetAddress() {
    assertTrue(InetAddresses.isUriInetAddress("192.168.1.1"));
    assertTrue(InetAddresses.isUriInetAddress("[3ffe:0:0:0:0:0:0:1]"));
    assertTrue(InetAddresses.isUriInetAddress("[::ffff:192.0.2.1]"));
    assertFalse(InetAddresses.isUriInetAddress("[192.168.1.1"));
    assertFalse(InetAddresses.isUriInetAddress("192.168.1.1]"));
    assertFalse(InetAddresses.isUriInetAddress(""));
    assertFalse(InetAddresses.isUriInetAddress("192.168.999.888"));
    assertFalse(InetAddresses.isUriInetAddress("www.google.com"));
    assertFalse(InetAddresses.isUriInetAddress("1:2e"));
    assertFalse(InetAddresses.isUriInetAddress("[3ffe:0:0:0:0:0:0:1"));
    assertFalse(InetAddresses.isUriInetAddress("3ffe:0:0:0:0:0:0:1]"));
    assertFalse(InetAddresses.isUriInetAddress("3ffe:0:0:0:0:0:0:1"));
    assertFalse(InetAddresses.isUriInetAddress("::ffff:192.0.2.1"));
  }
  public void testForUriStringBad() {
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString(""));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("192.168.999.888"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("www.google.com"));
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("[1:2e]"));
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("[192.168.1.1]"));
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("192.168.1.1]"));
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("[192.168.1.1"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("[3ffe:0:0:0:0:0:0:1"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("3ffe:0:0:0:0:0:0:1]"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("3ffe:0:0:0:0:0:0:1"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("::ffff:192.0.2.1"));
  }
  // IPv4-compatible IPv6 addresses ("::a.b.c.d"): detection and extraction of the IPv4 part.
  public void testCompatIPv4Addresses() {
    ImmutableSet<String> nonCompatAddresses = ImmutableSet.of("3ffe::1", "::", "::1");
    for (String nonCompatAddress : nonCompatAddresses) {
      InetAddress ip = InetAddresses.forString(nonCompatAddress);
      assertFalse(InetAddresses.isCompatIPv4Address((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + nonCompatAddress + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.getCompatIPv4Address((Inet6Address) ip));
    }
    ImmutableSet<String> validCompatAddresses = ImmutableSet.of("::1.2.3.4", "::102:304");
    String compatStr = "1.2.3.4";
    InetAddress compat = InetAddresses.forString(compatStr);
    for (String validCompatAddress : validCompatAddresses) {
      InetAddress ip = InetAddresses.forString(validCompatAddress);
      assertTrue("checking '" + validCompatAddress + "'", ip instanceof Inet6Address);
      assertTrue(
          "checking '" + validCompatAddress + "'",
          InetAddresses.isCompatIPv4Address((Inet6Address) ip));
      assertEquals(
          "checking '" + validCompatAddress + "'",
          compat,
          InetAddresses.getCompatIPv4Address((Inet6Address) ip));
    }
  }
  public void testMappedIPv4Addresses() throws UnknownHostException {
    /*
     * Verify that it is not possible to instantiate an Inet6Address
     * from an "IPv4 mapped" IPv6 address. Our String-based method can
     * at least identify them, however.
     */
    String mappedStr = "::ffff:192.168.0.1";
    assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
    InetAddress mapped = InetAddresses.forString(mappedStr);
    assertThat(mapped).isNotInstanceOf(Inet6Address.class);
    assertEquals(InetAddress.getByName("192.168.0.1"), mapped);
    // check upper case
    mappedStr = "::FFFF:192.168.0.1";
    assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
    mapped = InetAddresses.forString(mappedStr);
    assertThat(mapped).isNotInstanceOf(Inet6Address.class);
    assertEquals(InetAddress.getByName("192.168.0.1"), mapped);
    // leading zeros in the zero groups are still recognized as mapped
    mappedStr = "0:00:000:0000:0:ffff:1.2.3.4";
    assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
    mapped = InetAddresses.forString(mappedStr);
    assertThat(mapped).isNotInstanceOf(Inet6Address.class);
    assertEquals(InetAddress.getByName("1.2.3.4"), mapped);
    // hex-group form of the trailing IPv4 part
    mappedStr = "::ffff:0102:0304";
    assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
    mapped = InetAddresses.forString(mappedStr);
    assertThat(mapped).isNotInstanceOf(Inet6Address.class);
    assertEquals(InetAddress.getByName("1.2.3.4"), mapped);
    assertFalse(InetAddresses.isMappedIPv4Address("::"));
    assertFalse(InetAddresses.isMappedIPv4Address("::ffff"));
    assertFalse(InetAddresses.isMappedIPv4Address("::ffff:0"));
    assertFalse(InetAddresses.isMappedIPv4Address("::fffe:0:0"));
    assertFalse(InetAddresses.isMappedIPv4Address("::1:ffff:0:0"));
    assertFalse(InetAddresses.isMappedIPv4Address("foo"));
    assertFalse(InetAddresses.isMappedIPv4Address("192.0.2.1"));
  }
  // 6to4 addresses (2002::/16) embed the client IPv4 address in bits 16..48.
  public void test6to4Addresses() {
    ImmutableSet<String> non6to4Addresses = ImmutableSet.of("::1.2.3.4", "3ffe::1", "::", "::1");
    for (String non6to4Address : non6to4Addresses) {
      InetAddress ip = InetAddresses.forString(non6to4Address);
      assertFalse(InetAddresses.is6to4Address((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + non6to4Address + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.get6to4IPv4Address((Inet6Address) ip));
    }
    String valid6to4Address = "2002:0102:0304::1";
    String ipv4Str = "1.2.3.4";
    InetAddress ipv4 = InetAddresses.forString(ipv4Str);
    InetAddress ip = InetAddresses.forString(valid6to4Address);
    assertTrue(InetAddresses.is6to4Address((Inet6Address) ip));
    assertEquals(ipv4, InetAddresses.get6to4IPv4Address((Inet6Address) ip));
  }
  // Teredo (2001:0::/32): server, flags, obfuscated port, and obfuscated client address.
  public void testTeredoAddresses() {
    ImmutableSet<String> nonTeredoAddresses = ImmutableSet.of("::1.2.3.4", "3ffe::1", "::", "::1");
    for (String nonTeredoAddress : nonTeredoAddresses) {
      InetAddress ip = InetAddresses.forString(nonTeredoAddress);
      assertFalse(InetAddresses.isTeredoAddress((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + nonTeredoAddress + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.getTeredoInfo((Inet6Address) ip));
    }
    String validTeredoAddress = "2001:0000:4136:e378:8000:63bf:3fff:fdd2";
    String serverStr = "65.54.227.120";
    String clientStr = "192.0.2.45";
    int port = 40000;
    int flags = 0x8000;
    InetAddress ip = InetAddresses.forString(validTeredoAddress);
    assertTrue(InetAddresses.isTeredoAddress((Inet6Address) ip));
    InetAddresses.TeredoInfo teredo = InetAddresses.getTeredoInfo((Inet6Address) ip);
    InetAddress server = InetAddresses.forString(serverStr);
    assertEquals(server, teredo.getServer());
    InetAddress client = InetAddresses.forString(clientStr);
    assertEquals(client, teredo.getClient());
    assertEquals(port, teredo.getPort());
    assertEquals(flags, teredo.getFlags());
  }
  // Null server/client arguments default to 0.0.0.0.
  public void testTeredoAddress_nullServer() {
    InetAddresses.TeredoInfo info = new InetAddresses.TeredoInfo(null, null, 80, 1000);
    assertEquals(InetAddresses.forString("0.0.0.0"), info.getServer());
    assertEquals(InetAddresses.forString("0.0.0.0"), info.getClient());
    assertEquals(80, info.getPort());
    assertEquals(1000, info.getFlags());
  }
  // ISATAP: interface identifier of the form [02]00:5efe:<IPv4>.
  public void testIsatapAddresses() {
    InetAddress ipv4 = InetAddresses.forString("1.2.3.4");
    ImmutableSet<String> validIsatapAddresses =
        ImmutableSet.of(
            "2001:db8::5efe:102:304",
            "2001:db8::100:5efe:102:304", // Private Multicast? Not likely.
            "2001:db8::200:5efe:102:304",
            "2001:db8::300:5efe:102:304" // Public Multicast? Also unlikely.
            );
    ImmutableSet<String> nonIsatapAddresses =
        ImmutableSet.of(
            "::1.2.3.4",
            "3ffe::1",
            "::",
            "::1",
            "2001:db8::0040:5efe:102:304",
            "2001:db8::5ffe:102:304",
            "2001:db8::5eff:102:304",
            "2001:0:102:203:200:5efe:506:708" // Teredo address; not ISATAP
            );
    for (String validIsatapAddress : validIsatapAddresses) {
      InetAddress ip = InetAddresses.forString(validIsatapAddress);
      assertTrue(InetAddresses.isIsatapAddress((Inet6Address) ip));
      assertEquals(
          "checking '" + validIsatapAddress + "'",
          ipv4,
          InetAddresses.getIsatapIPv4Address((Inet6Address) ip));
    }
    for (String nonIsatapAddress : nonIsatapAddresses) {
      InetAddress ip = InetAddresses.forString(nonIsatapAddress);
      assertFalse(InetAddresses.isIsatapAddress((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + nonIsatapAddress + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.getIsatapIPv4Address((Inet6Address) ip));
    }
  }
  // Only compat, 6to4, and Teredo forms count as "embedded IPv4 client" addresses;
  // ISATAP deliberately does not.
  public void testGetEmbeddedIPv4ClientAddress() {
    Inet6Address testIp;
    // Test regular global unicast address.
    testIp = (Inet6Address) InetAddresses.forString("2001:db8::1");
    assertFalse(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    // Test ISATAP address.
    testIp = (Inet6Address) InetAddresses.forString("2001:db8::5efe:102:304");
    assertFalse(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    // Test compat address.
    testIp = (Inet6Address) InetAddresses.forString("::1.2.3.4");
    assertTrue(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    InetAddress ipv4 = InetAddresses.forString("1.2.3.4");
    assertEquals(ipv4, InetAddresses.getEmbeddedIPv4ClientAddress(testIp));
    // Test 6to4 address.
    testIp = (Inet6Address) InetAddresses.forString("2002:0102:0304::1");
    assertTrue(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    ipv4 = InetAddresses.forString("1.2.3.4");
    assertEquals(ipv4, InetAddresses.getEmbeddedIPv4ClientAddress(testIp));
    // Test Teredo address.
    testIp = (Inet6Address) InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2");
    assertTrue(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    ipv4 = InetAddresses.forString("192.0.2.45");
    assertEquals(ipv4, InetAddresses.getEmbeddedIPv4ClientAddress(testIp));
  }
  // Coercion maps IPv6 addresses into IPv4 space: identity for IPv4, special cases for
  // ::/::1, and a hash into 224.0.0.0/3 otherwise (stable per embedded client address).
  public void testGetCoercedIPv4Address() {
    // Check that a coerced IPv4 address is unaltered.
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("127.0.0.1")))
        .isEqualTo(InetAddresses.forString("127.0.0.1"));
    // ::1 special case
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("::1")))
        .isEqualTo(InetAddresses.forString("127.0.0.1"));
    // :: special case
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("::")))
        .isEqualTo(InetAddresses.forString("0.0.0.0"));
    // test compat address (should be hashed)
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("::1.2.3.4")))
        .isNotEqualTo(InetAddresses.forString("1.2.3.4"));
    // test 6to4 address (should be hashed)
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::1")))
        .isNotEqualTo(InetAddresses.forString("1.2.3.4"));
    // 2 6to4 addresses differing in the embedded IPv4 address should
    // hash to the different values.
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::1")))
        .isNotEqualTo(
            InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0506:0708::1")));
    // 2 6to4 addresses NOT differing in the embedded IPv4 address should
    // hash to the same value.
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::1")))
        .isEqualTo(
            InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::2")));
    // test Teredo address (should be hashed)
    assertThat(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2")))
        .isNotEqualTo(InetAddresses.forString("192.0.2.45"));
    // 2 Teredo addresses differing in their embedded IPv4 addresses should hash to different
    // values.
    assertThat(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2")))
        .isNotEqualTo(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd3")));
    // 2 Teredo addresses NOT differing in their embedded IPv4 addresses should hash to the same
    // value.
    assertThat(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2")))
        .isEqualTo(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:5136:f378:9000:73bf:3fff:fdd2")));
    // Test that an address hashes in to the 224.0.0.0/3 number-space.
    int coercedInt =
        InetAddresses.coerceToInteger(
            InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2001:4860::1")));
    assertThat(coercedInt).isAtLeast(0xe0000000);
    assertThat(coercedInt).isAtMost(0xfffffffe);
  }
  public void testCoerceToInteger() {
    assertThat(InetAddresses.coerceToInteger(InetAddresses.forString("127.0.0.1")))
        .isEqualTo(0x7f000001);
  }
  public void testFromInteger() {
    assertThat(InetAddresses.fromInteger(0x7f000001))
        .isEqualTo(InetAddresses.forString("127.0.0.1"));
  }
  // Byte-order reversal for both 4-byte and 16-byte inputs; anything else is rejected.
  public void testFromLittleEndianByteArray() throws UnknownHostException {
    assertEquals(
        InetAddresses.fromLittleEndianByteArray(new byte[] {1, 2, 3, 4}),
        InetAddress.getByAddress(new byte[] {4, 3, 2, 1}));
    assertEquals(
        InetAddresses.fromLittleEndianByteArray(
            new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}),
        InetAddress.getByAddress(
            new byte[] {16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1}));
    assertThrows(
        UnknownHostException.class, () -> InetAddresses.fromLittleEndianByteArray(new byte[3]));
  }
  public void testIsMaximum() throws UnknownHostException {
    InetAddress address = InetAddress.getByName("255.255.255.254");
    assertFalse(InetAddresses.isMaximum(address));
    address = InetAddress.getByName("255.255.255.255");
    assertTrue(InetAddresses.isMaximum(address));
    address = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe");
    assertFalse(InetAddresses.isMaximum(address));
    address = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
    assertTrue(InetAddresses.isMaximum(address));
  }
  @SuppressWarnings("IdentifierName") // the best we could do for adjacent digit blocks
  public void testIncrementIPv4() throws UnknownHostException {
    InetAddress address_66_0 = InetAddress.getByName("172.24.66.0");
    InetAddress address_66_255 = InetAddress.getByName("172.24.66.255");
    InetAddress address_67_0 = InetAddress.getByName("172.24.67.0");
    InetAddress address = address_66_0;
    // 255 single increments cross the last-octet boundary exactly once.
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.increment(address);
    }
    assertEquals(address_66_255, address);
    address = InetAddresses.increment(address);
    assertEquals(address_67_0, address);
    // Incrementing the all-ones address must fail rather than wrap.
    InetAddress address_ffffff = InetAddress.getByName("255.255.255.255");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.increment(address_ffffff));
  }
  @SuppressWarnings("IdentifierName") // the best we could do for adjacent digit blocks
  public void testIncrementIPv6() throws UnknownHostException {
    InetAddress addressV6_66_0 = InetAddress.getByName("2001:db8::6600");
    InetAddress addressV6_66_ff = InetAddress.getByName("2001:db8::66ff");
    InetAddress addressV6_67_0 = InetAddress.getByName("2001:db8::6700");
    InetAddress address = addressV6_66_0;
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.increment(address);
    }
    assertEquals(addressV6_66_ff, address);
    address = InetAddresses.increment(address);
    assertEquals(addressV6_67_0, address);
    // Incrementing the all-ones address must fail rather than wrap.
    InetAddress addressV6_ffffff = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.increment(addressV6_ffffff));
  }
  public void testDecrementIPv4() throws UnknownHostException {
    InetAddress address660 = InetAddress.getByName("172.24.66.0");
    InetAddress address66255 = InetAddress.getByName("172.24.66.255");
    InetAddress address670 = InetAddress.getByName("172.24.67.0");
    InetAddress address = address670;
    address = InetAddresses.decrement(address);
    assertEquals(address66255, address);
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.decrement(address);
    }
    assertEquals(address660, address);
    // Decrementing the all-zeros address must fail rather than wrap.
    InetAddress address0000 = InetAddress.getByName("0.0.0.0");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.decrement(address0000));
  }
  public void testDecrementIPv6() throws UnknownHostException {
    InetAddress addressV6660 = InetAddress.getByName("2001:db8::6600");
    InetAddress addressV666ff = InetAddress.getByName("2001:db8::66ff");
    InetAddress addressV6670 = InetAddress.getByName("2001:db8::6700");
    InetAddress address = addressV6670;
    address = InetAddresses.decrement(address);
    assertEquals(addressV666ff, address);
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.decrement(address);
    }
    assertEquals(addressV6660, address);
    // Decrementing the all-zeros address must fail rather than wrap.
    InetAddress addressV6000000 = InetAddress.getByName("0:0:0:0:0:0:0:0");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.decrement(addressV6000000));
  }
  public void testFromIpv4BigIntegerThrowsLessThanZero() {
    IllegalArgumentException expected =
        assertThrows(
            IllegalArgumentException.class,
            () -> InetAddresses.fromIPv4BigInteger(BigInteger.valueOf(-1L)));
    assertThat(expected)
        .hasMessageThat()
        .isEqualTo("BigInteger must be greater than or equal to 0");
  }
  public void testFromIpv6BigIntegerThrowsLessThanZero() {
    IllegalArgumentException expected =
        assertThrows(
            IllegalArgumentException.class,
            () -> InetAddresses.fromIPv6BigInteger(BigInteger.valueOf(-1L)));
    assertThat(expected)
        .hasMessageThat()
        .isEqualTo("BigInteger must be greater than or equal to 0");
  }
  // Round-trip checks at the boundaries of the 32-bit IPv4 range.
  public void testFromIpv4BigIntegerValid() {
    checkBigIntegerConversion("0.0.0.0", BigInteger.ZERO);
    checkBigIntegerConversion("0.0.0.1", BigInteger.ONE);
    checkBigIntegerConversion("127.255.255.255", BigInteger.valueOf(Integer.MAX_VALUE));
    checkBigIntegerConversion(
        "255.255.255.254", BigInteger.valueOf(Integer.MAX_VALUE).multiply(BigInteger.valueOf(2)));
    checkBigIntegerConversion(
        "255.255.255.255", BigInteger.ONE.shiftLeft(32).subtract(BigInteger.ONE));
  }
  // Round-trip checks at the boundaries of the 128-bit IPv6 range.
  public void testFromIpv6BigIntegerValid() {
    checkBigIntegerConversion("::", BigInteger.ZERO);
    checkBigIntegerConversion("::1", BigInteger.ONE);
    checkBigIntegerConversion("::7fff:ffff", BigInteger.valueOf(Integer.MAX_VALUE));
    checkBigIntegerConversion("::7fff:ffff:ffff:ffff", BigInteger.valueOf(Long.MAX_VALUE));
    checkBigIntegerConversion(
        "::ffff:ffff:ffff:ffff", BigInteger.ONE.shiftLeft(64).subtract(BigInteger.ONE));
    checkBigIntegerConversion(
        "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
        BigInteger.ONE.shiftLeft(128).subtract(BigInteger.ONE));
  }
  public void testFromIpv4BigIntegerInputTooLarge() {
    IllegalArgumentException expected =
        assertThrows(
            IllegalArgumentException.class,
            () ->
                InetAddresses.fromIPv4BigInteger(BigInteger.ONE.shiftLeft(32).add(BigInteger.ONE)));
    assertThat(expected)
        .hasMessageThat()
        .isEqualTo(
            "BigInteger cannot be converted to InetAddress because it has more than 4 bytes:"
                + " 4294967297");
  }
  public void testFromIpv6BigIntegerInputTooLarge() {
    IllegalArgumentException expected =
        assertThrows(
            IllegalArgumentException.class,
            () ->
                InetAddresses.fromIPv6BigInteger(
                    BigInteger.ONE.shiftLeft(128).add(BigInteger.ONE)));
    assertThat(expected)
        .hasMessageThat()
        .isEqualTo(
            "BigInteger cannot be converted to InetAddress because it has more than 16 bytes:"
                + " 340282366920938463463374607431768211457");
  }
  // see https://github.com/google/guava/issues/2587
  // Collects every local interface name and its numeric index, for use as scope-ID suffixes.
  // Asserts the machine has at least one interface; results vary by host.
  private static ImmutableSet<String> getMachineScopesAndInterfaces() throws SocketException {
    ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
    assertTrue(interfaces.hasMoreElements());
    while (interfaces.hasMoreElements()) {
      NetworkInterface i = interfaces.nextElement();
      builder.add(i.getName()).add(String.valueOf(i.getIndex()));
    }
    return builder.build();
  }
  /** Checks that the IP converts to the big integer and the big integer converts to the IP. */
  private static void checkBigIntegerConversion(String ip, BigInteger bigIntegerIp) {
    InetAddress address = InetAddresses.forString(ip);
    boolean isIpv6 = address instanceof Inet6Address;
    assertEquals(bigIntegerIp, InetAddresses.toBigInteger(address));
    assertEquals(
        address,
        isIpv6
            ? InetAddresses.fromIPv6BigInteger(bigIntegerIp)
            : InetAddresses.fromIPv4BigInteger(bigIntegerIp));
  }
}
|
googleapis/google-cloud-java | 36,714 | java-contact-center-insights/proto-google-cloud-contact-center-insights-v1/src/main/java/com/google/cloud/contactcenterinsights/v1/ListPhraseMatchersRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contactcenterinsights/v1/contact_center_insights.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contactcenterinsights.v1;
/**
*
*
* <pre>
* Request to list phrase matchers.
* </pre>
*
* Protobuf type {@code google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest}
*/
public final class ListPhraseMatchersRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest)
ListPhraseMatchersRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListPhraseMatchersRequest.newBuilder() to construct.
private ListPhraseMatchersRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListPhraseMatchersRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListPhraseMatchersRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
.internal_static_google_cloud_contactcenterinsights_v1_ListPhraseMatchersRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
.internal_static_google_cloud_contactcenterinsights_v1_ListPhraseMatchersRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest.class,
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource of the phrase matcher.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The parent resource of the phrase matcher.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of phrase matchers to return in the response. If this
* value is zero, the service will select a default size. A call might return
* fewer objects than requested. A non-empty `next_page_token` in the response
* indicates that more data is available.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The value returned by the last `ListPhraseMatchersResponse`. This value
* indicates that this is a continuation of a prior `ListPhraseMatchers` call
* and that the system should return the next page of data.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The value returned by the last `ListPhraseMatchersResponse`. This value
* indicates that this is a continuation of a prior `ListPhraseMatchers` call
* and that the system should return the next page of data.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* A filter to reduce results to a specific subset. Useful for querying
* phrase matchers with specific properties.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* A filter to reduce results to a specific subset. Useful for querying
* phrase matchers with specific properties.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest)) {
return super.equals(obj);
}
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest other =
(com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request to list phrase matchers.
* </pre>
*
* Protobuf type {@code google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest)
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
.internal_static_google_cloud_contactcenterinsights_v1_ListPhraseMatchersRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
.internal_static_google_cloud_contactcenterinsights_v1_ListPhraseMatchersRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest.class,
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest.Builder.class);
}
// Construct using
// com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.contactcenterinsights.v1.ContactCenterInsightsProto
.internal_static_google_cloud_contactcenterinsights_v1_ListPhraseMatchersRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
getDefaultInstanceForType() {
return com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest build() {
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest buildPartial() {
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest result =
new com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.filter_ = filter_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest) {
return mergeFrom(
(com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest other) {
if (other
== com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 34:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent resource of the phrase matcher.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource of the phrase matcher.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent resource of the phrase matcher.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource of the phrase matcher.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent resource of the phrase matcher.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of phrase matchers to return in the response. If this
* value is zero, the service will select a default size. A call might return
* fewer objects than requested. A non-empty `next_page_token` in the response
* indicates that more data is available.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of phrase matchers to return in the response. If this
* value is zero, the service will select a default size. A call might return
* fewer objects than requested. A non-empty `next_page_token` in the response
* indicates that more data is available.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of phrase matchers to return in the response. If this
* value is zero, the service will select a default size. A call might return
* fewer objects than requested. A non-empty `next_page_token` in the response
* indicates that more data is available.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The value returned by the last `ListPhraseMatchersResponse`. This value
* indicates that this is a continuation of a prior `ListPhraseMatchers` call
* and that the system should return the next page of data.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The value returned by the last `ListPhraseMatchersResponse`. This value
* indicates that this is a continuation of a prior `ListPhraseMatchers` call
* and that the system should return the next page of data.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The value returned by the last `ListPhraseMatchersResponse`. This value
* indicates that this is a continuation of a prior `ListPhraseMatchers` call
* and that the system should return the next page of data.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The value returned by the last `ListPhraseMatchersResponse`. This value
* indicates that this is a continuation of a prior `ListPhraseMatchers` call
* and that the system should return the next page of data.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The value returned by the last `ListPhraseMatchersResponse`. This value
* indicates that this is a continuation of a prior `ListPhraseMatchers` call
* and that the system should return the next page of data.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* A filter to reduce results to a specific subset. Useful for querying
* phrase matchers with specific properties.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A filter to reduce results to a specific subset. Useful for querying
* phrase matchers with specific properties.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A filter to reduce results to a specific subset. Useful for querying
* phrase matchers with specific properties.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* A filter to reduce results to a specific subset. Useful for querying
* phrase matchers with specific properties.
* </pre>
*
* <code>string filter = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* A filter to reduce results to a specific subset. Useful for querying
* phrase matchers with specific properties.
* </pre>
*
* <code>string filter = 4;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest)
private static final com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest();
}
public static com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListPhraseMatchersRequest> PARSER =
new com.google.protobuf.AbstractParser<ListPhraseMatchersRequest>() {
@java.lang.Override
public ListPhraseMatchersRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListPhraseMatchersRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListPhraseMatchersRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.contactcenterinsights.v1.ListPhraseMatchersRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/hop | 36,821 | plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/textfileoutput/TextFileOutput.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hop.pipeline.transforms.textfileoutput;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.vfs2.FileObject;
import org.apache.hop.core.Const;
import org.apache.hop.core.ResultFile;
import org.apache.hop.core.compress.CompressionOutputStream;
import org.apache.hop.core.compress.CompressionProviderFactory;
import org.apache.hop.core.compress.ICompressionProvider;
import org.apache.hop.core.compress.zip.ZipCompressionProvider;
import org.apache.hop.core.exception.HopException;
import org.apache.hop.core.exception.HopFileException;
import org.apache.hop.core.exception.HopTransformException;
import org.apache.hop.core.exception.HopValueException;
import org.apache.hop.core.fileinput.CharsetToolkit;
import org.apache.hop.core.row.IRowMeta;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.core.vfs.HopVfs;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.Pipeline;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.transform.BaseTransform;
import org.apache.hop.pipeline.transform.TransformMeta;
/** Converts input rows to text and then writes this text to one or more files. */
public class TextFileOutput<Meta extends TextFileOutputMeta, Data extends TextFileOutputData>
extends BaseTransform<Meta, Data> {
private static final Class<?> PKG = TextFileOutputMeta.class;
private static final String FILE_COMPRESSION_TYPE_NONE =
TextFileOutputMeta.fileCompressionTypeCodes[TextFileOutputMeta.FILE_COMPRESSION_TYPE_NONE];
public static final String CONST_ERROR_OPENING_NEW_FILE = "Error opening new file : ";
  /**
   * Creates a new text file output transform copy.
   *
   * @param transformMeta the transform definition in the pipeline
   * @param meta the transform-specific metadata (file name, fields, compression, ...)
   * @param data the transform-specific runtime data holder
   * @param copyNr the copy number of this transform instance
   * @param pipelineMeta the metadata of the owning pipeline
   * @param pipeline the running pipeline
   */
  public TextFileOutput(
      TransformMeta transformMeta,
      Meta meta,
      Data data,
      int copyNr,
      PipelineMeta pipelineMeta,
      Pipeline pipeline) {
    super(transformMeta, meta, data, copyNr, pipelineMeta, pipeline);
  }
private void initFieldNumbers(IRowMeta outputRowMeta, TextFileField[] outputFields)
throws HopException {
data.fieldnrs = new int[outputFields.length];
for (int i = 0; i < outputFields.length; i++) {
data.fieldnrs[i] = outputRowMeta.indexOfValue(outputFields[i].getName());
if (data.fieldnrs[i] < 0) {
throw new HopTransformException(
"Field [" + outputFields[i].getName() + "] couldn't be found in the input stream!");
}
}
}
public boolean isFileExists(String filename) throws HopException {
try {
return getFileObject(filename, this).exists();
} catch (Exception e) {
throw new HopException(CONST_ERROR_OPENING_NEW_FILE + e.toString());
}
}
public boolean isFileEmpty(String filename) throws HopException {
try {
return getFileObject(filename, this).getContent().getSize() == 0;
} catch (Exception e) {
throw new HopException(CONST_ERROR_OPENING_NEW_FILE + e.toString());
}
}
private ICompressionProvider getCompressionProvider() throws HopException {
String compressionType = meta.getFileCompression();
if (Utils.isEmpty(compressionType)) {
compressionType = FILE_COMPRESSION_TYPE_NONE;
}
ICompressionProvider compressionProvider =
CompressionProviderFactory.getInstance().getCompressionProviderByName(compressionType);
if (compressionProvider == null) {
throw new HopException("No compression provider found with name = " + compressionType);
}
if (!compressionProvider.supportsOutput()) {
throw new HopException(
"Compression provider " + compressionType + " does not support output streams!");
}
return compressionProvider;
}
  /**
   * Obtains (or creates) the stream triple — raw file stream, compression stream, buffered
   * writer — for the given filename and publishes it into {@code data.fos}/{@code data.out}/
   * {@code data.writer}.
   *
   * <p>Streams are cached in {@code data.getFileStreamsCollection()}. Three cases are handled:
   * the stream already exists and is open (reuse), it exists but was closed (reopen in append
   * mode), or it is opened here for the first time. The collection is bounded by
   * {@link #getMaxOpenFiles()}; the oldest open stream is closed when the limit is reached.
   *
   * @param filename the output filename to open a writer for
   * @throws HopException on any file-system, compression, or append-to-zip error
   */
  public void initFileStreamWriter(String filename) throws HopException {
    data.writer = null;
    try {
      TextFileOutputData.FileStream fileStreams = null;
      try {
        if (data.splitEvery > 0) {
          // When splitting, only the most recently opened file can be reused.
          if (filename.equals(data.getFileStreamsCollection().getLastFileName())) {
            fileStreams = data.getFileStreamsCollection().getLastStream();
          }
        } else {
          fileStreams = data.getFileStreamsCollection().getStream(filename);
        }
        boolean writingToFileForFirstTime = fileStreams == null;
        if (writingToFileForFirstTime) { // Opening file for first time
          if (meta.isAddToResultFiles()) {
            // Add this to the result file names...
            ResultFile resultFile =
                new ResultFile(
                    ResultFile.FILE_TYPE_GENERAL,
                    getFileObject(filename, this),
                    getPipelineMeta().getName(),
                    getTransformName());
            resultFile.setComment(BaseMessages.getString(PKG, "TextFileOutput.AddResultFile"));
            addResultFile(resultFile);
          }
          ICompressionProvider compressionProvider = getCompressionProvider();
          boolean isZipFile = compressionProvider instanceof ZipCompressionProvider;
          boolean createParentDirIfNotExists = meta.isCreateParentFolder();
          boolean appendToExistingFile = meta.isFileAppended();
          if (appendToExistingFile && isZipFile && isFileExists(filename)) {
            // Zip archives cannot be reopened for append.
            throw new HopException("Can not append to an existing zip file : " + filename);
          }
          int maxOpenFiles = getMaxOpenFiles();
          if ((maxOpenFiles > 0)
              && (data.getFileStreamsCollection().getNumOpenFiles() >= maxOpenFiles)) {
            // If the file we're going to close is a zip file, going to remove it from the
            // collection of files
            // that have been opened. We do this because it is not possible to reopen a
            // zip file for append. By removing it from the collection, if the same file is
            // referenced later, it will look
            // like we're opening the file for the first time, and if we're set up to append to
            // existing files it will cause an
            // exception to be thrown, which is the desired result.
            data.getFileStreamsCollection().closeOldestOpenFile(isZipFile);
          }
          if (createParentDirIfNotExists
              && ((data.getFileStreamsCollection().size() == 0) || meta.isFileNameInField())) {
            createParentFolder(filename);
          }
          if (isDetailed()) {
            logDetailed("Opening output stream using provider: " + compressionProvider.getName());
          }
          OutputStream fileOutputStream =
              getOutputStream(filename, this, !isZipFile && appendToExistingFile);
          CompressionOutputStream compressionOutputStream =
              compressionProvider.createOutputStream(fileOutputStream);
          // The compression output stream may also archive entries. For this we create the filename
          // (with appropriate extension) and add it as an entry to the output stream. For providers
          // that do not archive entries, they should use the default no-op implementation.
          compressionOutputStream.addEntry(filename, resolve(meta.getExtension()));
          if (isDetailed()) {
            if (!Utils.isEmpty(meta.getEncoding())) {
              logDetailed("Opening output stream in encoding: " + meta.getEncoding());
            } else {
              logDetailed("Opening output stream in default encoding");
            }
          }
          BufferedOutputStream bufferedOutputStream =
              new BufferedOutputStream(compressionOutputStream, 5000);
          fileStreams =
              data.new FileStream(fileOutputStream, compressionOutputStream, bufferedOutputStream);
          data.getFileStreamsCollection().add(filename, fileStreams);
          if (isDetailed()) {
            logDetailed("Opened new file with name [" + HopVfs.getFriendlyURI(filename) + "]");
          }
        } else if (fileStreams.getBufferedOutputStream()
            == null) { // File was previously opened and now needs to be reopened.
          int maxOpenFiles = getMaxOpenFiles();
          if ((maxOpenFiles > 0)
              && (data.getFileStreamsCollection().getNumOpenFiles() >= maxOpenFiles)) {
            data.getFileStreamsCollection().closeOldestOpenFile(false);
          }
          // Reopen in append mode so previously written content is preserved.
          OutputStream fileOutputStream = getOutputStream(filename, this, true);
          ICompressionProvider compressionProvider = getCompressionProvider();
          CompressionOutputStream compressionOutputStream =
              compressionProvider.createOutputStream(fileOutputStream);
          compressionOutputStream.addEntry(filename, resolve(meta.getExtension()));
          BufferedOutputStream bufferedOutputStream =
              new BufferedOutputStream(compressionOutputStream, 5000);
          fileStreams.setFileOutputStream(fileOutputStream);
          fileStreams.setCompressedOutputStream(compressionOutputStream);
          fileStreams.setBufferedOutputStream(bufferedOutputStream);
        }
      } catch (Exception e) {
        if (!(e instanceof HopException hopException)) {
          throw new HopException(CONST_ERROR_OPENING_NEW_FILE + e.toString());
        } else {
          throw hopException;
        }
      }
      fileStreams.setDirty(true);
      data.fos = fileStreams.getFileOutputStream();
      data.out = fileStreams.getCompressedOutputStream();
      data.writer = fileStreams.getBufferedOutputStream();
    } catch (HopException ke) {
      throw ke;
    } catch (Exception e) {
      throw new HopException(CONST_ERROR_OPENING_NEW_FILE + e.toString());
    }
  }
  /**
   * Determines the output filename for the current row.
   *
   * <p>When {@code row} is null the filename comes from the transform configuration (or from the
   * last opened stream when a writer is already active); otherwise it is read from the configured
   * filename field of the row.
   *
   * @param row the current row when the filename is taken from a field, or null
   * @return the fully built output filename
   * @throws HopException when no filename is configured, or the filename field is missing or null
   */
  public String getOutputFileName(Object[] row) throws HopException {
    String filename = null;
    if (row == null) {
      if (data.writer != null) {
        // A stream is already open: keep writing to the same file.
        filename = data.getFileStreamsCollection().getLastFileName();
      } else {
        filename = meta.getFileName();
        if (filename == null) {
          throw new HopFileException(
              BaseMessages.getString(PKG, "TextFileOutput.Exception.FileNameNotSet"));
        }
        filename = buildFilename(resolve(filename), true);
      }
    } else {
      // Filename is taken from a field in the row.
      data.fileNameFieldIndex = getInputRowMeta().indexOfValue(meta.getFileNameField());
      if (data.fileNameFieldIndex < 0) {
        throw new HopTransformException(
            BaseMessages.getString(
                PKG, "TextFileOutput.Exception.FileNameFieldNotFound", meta.getFileNameField()));
      }
      data.fileNameMeta = getInputRowMeta().getValueMeta(data.fileNameFieldIndex);
      data.fileName = data.fileNameMeta.getString(row[data.fileNameFieldIndex]);
      if (data.fileName == null) {
        throw new HopFileException(
            BaseMessages.getString(PKG, "TextFileOutput.Exception.FileNameNotSet"));
      }
      filename = buildFilename(resolve(data.fileName), true);
    }
    return filename;
  }
public int getFlushInterval() {
String var = variables.getVariable("HOP_FILE_OUTPUT_MAX_STREAM_LIFE");
int flushInterval = 0;
if (var != null) {
try {
flushInterval = Integer.parseInt(var);
} catch (Exception ex) {
// Do nothing
}
}
return flushInterval;
}
public int getMaxOpenFiles() {
String var = variables.getVariable("HOP_FILE_OUTPUT_MAX_STREAM_COUNT");
int maxStreamCount = 0;
if (var != null) {
try {
maxStreamCount = Integer.parseInt(var);
} catch (Exception ex) {
// Do nothing
}
}
return maxStreamCount;
}
  // Warning!!!
  // We need to be very particular about how we go about determining whether or not to write a file
  // header before writing the row data.
  // There are two performance issues in play. 1: Don't hit the file system unnecessarily. 2: Don't
  // search the collection of
  // file streams unnecessarily. Messing around with this method could have serious performance
  // impacts.
  /**
   * Decides whether a header line must be written before the next row goes to {@code filename}.
   *
   * <p>A header is written only when headers are enabled, the file is being written to for the
   * first time in this run, and — when appending — the target file does not already exist or is
   * empty. Note that the file-system checks are deliberately short-circuited behind the cheaper
   * flags (see warning above).
   *
   * @param filename the file about to be written to
   * @return true when the header line should be written first
   * @throws HopException if the existence/emptiness of the file could not be determined
   */
  public boolean isWriteHeader(String filename) throws HopException {
    boolean writingToFileForFirstTime = first;
    boolean isWriteHeader = meta.isHeaderEnabled();
    if (isWriteHeader) {
      if (data.splitEvery > 0) {
        // With file splitting, a new filename means a new (first-time) file.
        writingToFileForFirstTime |=
            !filename.equals(data.getFileStreamsCollection().getLastFileName());
      } else {
        writingToFileForFirstTime |= data.getFileStreamsCollection().getStream(filename) == null;
      }
    }
    isWriteHeader &=
        writingToFileForFirstTime
            && (!meta.isFileAppended() || (!isFileExists(filename)) || isFileEmpty(filename));
    return isWriteHeader;
  }
private boolean writeRowToFile(Object[] row) throws HopException {
if (row != null) {
String filename = getOutputFileName(meta.isFileNameInField() ? row : null);
boolean isWriteHeader = isWriteHeader(filename);
if (data.writer == null || meta.isFileNameInField()) {
initFileStreamWriter(filename);
}
first = false;
if (isWriteHeader) {
writeHeader();
}
// If file has reached max user defined size. Close current file and open a new file.
if (!meta.isFileNameInField()
&& (getLinesOutput() > 0)
&& (data.splitEvery > 0)
&& ((getLinesOutput() + meta.getFooterShift()) % data.splitEvery) == 0) {
// If needed write footer to file before closing it.
if (meta.isFooterEnabled()) {
writeHeader();
}
closeFile(filename);
// Open a new file and write footer if needed.
data.splitnr++;
data.fos = null;
data.out = null;
data.writer = null;
filename = getOutputFileName(null);
isWriteHeader = isWriteHeader(filename);
initFileStreamWriter(filename);
if (isWriteHeader) {
writeHeader();
}
}
writeRow(data.outputRowMeta, row);
putRow(data.outputRowMeta, row); // in case we want it to go further...
if (checkFeedback(getLinesOutput())) {
logBasic("linenr " + getLinesOutput());
}
int flushInterval = getFlushInterval();
if (flushInterval > 0) {
long currentTime = new Date().getTime();
if (data.lastFileFlushTime == 0) {
data.lastFileFlushTime = currentTime;
} else if (data.lastFileFlushTime - currentTime > flushInterval) {
try {
data.getFileStreamsCollection().flushOpenFiles(false);
} catch (IOException e) {
throw new HopException("Unable to flush open files", e);
}
data.lastFileFlushTime = new Date().getTime();
}
}
return true;
} else {
if (data.writer != null) {
if (data.outputRowMeta != null && meta.isFooterEnabled()) {
writeHeader();
}
} else if (!Utils.isEmpty(resolve(meta.getEndedLine())) && !meta.isFileNameInField()) {
String filename = getOutputFileName(null);
initFileStreamWriter(filename);
initBinaryDataFields();
}
if (data.writer != null) {
writeEndedLine();
}
try {
flushOpenFiles(true);
} catch (IOException e) {
throw new HopException("Unable to flush open files", e);
}
setOutputDone();
return false;
}
}
public void flushOpenFiles(boolean closeAfterFlush) throws IOException {
data.getFileStreamsCollection().flushOpenFiles(true);
}
  /**
   * Main row-processing entry point: reads one row, performs first-row initialization (output
   * row layout, binary separators/enclosures, field indexes), then delegates the actual writing
   * to {@link #writeRowTo(Object[])}.
   *
   * @return true while there are more rows to process, false when done
   * @throws HopException on initialization or write errors
   */
  @Override
  public synchronized boolean processRow() throws HopException {
    if ((meta.getEncoding() == null) || (meta.getEncoding().isEmpty())) {
      // Fall back to the platform default charset when no encoding was configured.
      meta.setEncoding(CharsetToolkit.getDefaultSystemCharset().name());
    }
    Object[] row = getRow(); // This also waits for a row to be finished.
    if (row != null && first) {
      data.outputRowMeta = getInputRowMeta().clone();
    }
    if (first) {
      initBinaryDataFields();
      if (data.outputRowMeta != null) {
        initFieldNumbers(data.outputRowMeta, meta.getOutputFields());
        if (row != null) {
          meta.getFields(
              data.outputRowMeta, getTransformName(), null, null, this, metadataProvider);
        }
      }
    }
    return writeRowTo(row);
  }
  /**
   * Hook for subclasses to override the row destination; the default writes to a file.
   *
   * @param row the row to write, or null at end of stream
   * @return true while rows are being processed, false once output is finalized
   * @throws HopException on write errors
   */
  protected boolean writeRowTo(Object[] row) throws HopException {
    return writeRowToFile(row);
  }
  /**
   * Serializes one row to the current writer: either every value in stream order (no output
   * fields configured) or only the configured fields, separated by the binary separator and
   * terminated with the binary newline. Increments the output line counter on success.
   *
   * @param rowMeta the metadata describing the row values
   * @param r the row values
   * @throws HopTransformException when writing a value or the line terminator fails
   */
  public void writeRow(IRowMeta rowMeta, Object[] r) throws HopTransformException {
    try {
      if (meta.getOutputFields() == null || meta.getOutputFields().length == 0) {
        /*
         * Write all values in stream to text file.
         */
        for (int i = 0; i < rowMeta.size(); i++) {
          if (i > 0 && data.binarySeparator.length > 0) {
            data.writer.write(data.binarySeparator);
          }
          IValueMeta v = rowMeta.getValueMeta(i);
          Object valueData = r[i];
          // no special null value default was specified since no fields are specified at all
          // As such, we pass null
          //
          writeField(v, valueData, null);
        }
        data.writer.write(data.binaryNewline);
      } else {
        /*
         * Only write the fields specified!
         */
        for (int i = 0; i < meta.getOutputFields().length; i++) {
          if (i > 0 && data.binarySeparator.length > 0) {
            data.writer.write(data.binarySeparator);
          }
          IValueMeta v = rowMeta.getValueMeta(data.fieldnrs[i]);
          Object valueData = r[data.fieldnrs[i]];
          writeField(v, valueData, data.binaryNullValue[i]);
        }
        data.writer.write(data.binaryNewline);
      }
      incrementLinesOutput();
    } catch (Exception e) {
      throw new HopTransformException("Error writing line", e);
    }
  }
  /**
   * Converts a single value to its output byte representation.
   *
   * <p>Binary-stored strings with no trimming, no fixed length, and no special encoding are
   * passed through as-is (fast path); other strings are trimmed per the value's trim type and
   * encoded; non-string values use the value metadata's own binary representation.
   *
   * @param v the value metadata
   * @param valueData the value to format
   * @return the bytes to write for this value
   * @throws HopValueException when conversion fails
   */
  private byte[] formatField(IValueMeta v, Object valueData) throws HopValueException {
    if (v.isString()) {
      if (v.isStorageBinaryString()
          && v.getTrimType() == IValueMeta.TRIM_TYPE_NONE
          && v.getLength() < 0
          && Utils.isEmpty(v.getStringEncoding())) {
        return (byte[]) valueData;
      } else {
        String svalue =
            (valueData instanceof String stringValueData)
                ? stringValueData
                : v.getString(valueData);
        return convertStringToBinaryString(v, Const.trimToType(svalue, v.getTrimType()));
      }
    } else {
      return v.getBinaryString(valueData);
    }
  }
private byte[] convertStringToBinaryString(IValueMeta v, String string) throws HopValueException {
int length = v.getLength();
if (string == null) {
return new byte[] {};
}
if (length > -1 && length < string.length()) {
// we need to truncate
String tmp = string.substring(0, length);
if (Utils.isEmpty(v.getStringEncoding())) {
return tmp.getBytes();
} else {
try {
return tmp.getBytes(v.getStringEncoding());
} catch (UnsupportedEncodingException e) {
throw new HopValueException(
"Unable to convert String to Binary with specified string encoding ["
+ v.getStringEncoding()
+ "]",
e);
}
}
} else {
byte[] text;
if (Utils.isEmpty(meta.getEncoding())) {
text = string.getBytes();
} else {
try {
text = string.getBytes(meta.getEncoding());
} catch (UnsupportedEncodingException e) {
throw new HopValueException(
"Unable to convert String to Binary with specified string encoding ["
+ v.getStringEncoding()
+ "]",
e);
}
}
if (length > string.length()) {
// we need to pad this
// not all encoding use single characters, so we need to cope
// with this.
int size = 0;
byte[] filler = null;
try {
if (!Utils.isEmpty(meta.getEncoding())) {
filler = " ".getBytes(meta.getEncoding());
} else {
filler = " ".getBytes();
}
size = text.length + filler.length * (length - string.length());
} catch (UnsupportedEncodingException uee) {
throw new HopValueException(uee);
}
byte[] bytes = new byte[size];
System.arraycopy(text, 0, bytes, 0, text.length);
if (filler.length == 1) {
java.util.Arrays.fill(bytes, text.length, size, filler[0]);
} else {
int currIndex = text.length;
for (int i = 0; i < (length - string.length()); i++) {
for (int j = 0; j < filler.length; j++) {
bytes[currIndex++] = filler[j];
}
}
}
return bytes;
} else {
// do not need to pad or truncate
return text;
}
}
}
private byte[] getBinaryString(String string) throws HopTransformException {
try {
if (data.hasEncoding) {
return string.getBytes(meta.getEncoding());
} else {
return string.getBytes();
}
} catch (Exception e) {
throw new HopTransformException(e);
}
}
  /**
   * Writes a single field value to the current writer, substituting the null representation when
   * the value is null, optionally wrapping the value in enclosures, and doubling any enclosure
   * sequences found inside the value ("" escaping).
   *
   * @param v the value metadata
   * @param valueData the value to write
   * @param nullString the bytes to write for a null value, or null for no substitution
   * @throws HopTransformException when formatting or writing fails
   */
  private void writeField(IValueMeta v, Object valueData, byte[] nullString)
      throws HopTransformException {
    try {
      byte[] str;
      // First check whether or not we have a null string set
      // These values should be set when a null value passes
      //
      if (nullString != null && v.isNull(valueData)) {
        str = nullString;
      } else {
        if (meta.isFastDump()) {
          // Fast dump skips metadata-driven formatting: raw bytes or toString().
          if (valueData instanceof byte[] bytesValueData) {
            str = bytesValueData;
          } else {
            str = getBinaryString((valueData == null) ? "" : valueData.toString());
          }
        } else {
          str = formatField(v, valueData);
        }
      }
      if (str != null && str.length > 0) {
        List<Integer> enclosures = null;
        boolean writeEnclosures = false;
        if (v.isString()) {
          if (meta.isEnclosureForced() && !meta.isPadded()) {
            writeEnclosures = true;
          } else if (!meta.isEnclosureFixDisabled()
              && containsSeparatorOrEnclosure(str, data.binarySeparator, data.binaryEnclosure)) {
            // Value contains the separator or enclosure: must be enclosed to stay parseable.
            writeEnclosures = true;
          }
        }
        if (writeEnclosures) {
          data.writer.write(data.binaryEnclosure);
          enclosures = getEnclosurePositions(str);
        }
        if (enclosures == null) {
          data.writer.write(str);
        } else {
          // Skip the enclosures, double them instead...
          int from = 0;
          for (int i = 0; i < enclosures.size(); i++) {
            int position = enclosures.get(i);
            data.writer.write(str, from, position + data.binaryEnclosure.length - from);
            data.writer.write(data.binaryEnclosure); // write enclosure a second time
            from = position + data.binaryEnclosure.length;
          }
          if (from < str.length) {
            data.writer.write(str, from, str.length - from);
          }
        }
        if (writeEnclosures) {
          data.writer.write(data.binaryEnclosure);
        }
      }
    } catch (Exception e) {
      throw new HopTransformException("Error writing field content to file", e);
    }
  }
private List<Integer> getEnclosurePositions(byte[] str) {
List<Integer> positions = null;
if (data.binaryEnclosure != null && data.binaryEnclosure.length > 0) {
// +1 because otherwise we will not find it at the end
for (int i = 0, len = str.length - data.binaryEnclosure.length + 1; i < len; i++) {
// verify if on position i there is an enclosure
//
boolean found = true;
for (int x = 0; found && x < data.binaryEnclosure.length; x++) {
if (str[i + x] != data.binaryEnclosure[x]) {
found = false;
}
}
if (found) {
if (positions == null) {
positions = new ArrayList<>();
}
positions.add(i);
}
}
}
return positions;
}
  /**
   * Writes the configured "ended" line (after variable resolution) to the current writer, if it
   * is non-blank, and counts it as an output line.
   *
   * @return true when an error occurred (logged and swallowed), false on success
   */
  protected boolean writeEndedLine() {
    boolean retval = false;
    try {
      String sLine = resolve(meta.getEndedLine());
      if (sLine != null && !sLine.trim().isEmpty()) {
        data.writer.write(getBinaryString(sLine));
        incrementLinesOutput();
      }
    } catch (Exception e) {
      logError("Error writing ended tag line: " + e.toString());
      logError(Const.getStackTracker(e));
      retval = true;
    }
    return retval;
  }
  /**
   * Writes a header (or footer — same content) line consisting of the field names, separated by
   * the binary separator, enclosing names that require it.
   *
   * <p>Uses the configured output fields when present, otherwise all fields of the output row
   * layout; when neither is available a "no rows selected" marker is written. The line counter
   * is incremented regardless of write success (matches historical behavior).
   *
   * @return true when an error occurred (logged and swallowed), false on success
   */
  protected boolean writeHeader() {
    boolean retval = false;
    IRowMeta r = data.outputRowMeta;
    try {
      // If we have fields specified: list them in this order!
      if (meta.getOutputFields() != null && meta.getOutputFields().length > 0) {
        for (int i = 0; i < meta.getOutputFields().length; i++) {
          String fieldName = meta.getOutputFields()[i].getName();
          IValueMeta v = r.searchValueMeta(fieldName);
          if (i > 0 && data.binarySeparator.length > 0) {
            data.writer.write(data.binarySeparator);
          }
          // Enclose when forced for string fields, or when the name itself contains the
          // separator/enclosure and the fix is not disabled.
          boolean writeEnclosure =
              (meta.isEnclosureForced()
                      && data.binaryEnclosure.length > 0
                      && v != null
                      && v.isString())
                  || ((!meta.isEnclosureFixDisabled()
                      && containsSeparatorOrEnclosure(
                          fieldName.getBytes(), data.binarySeparator, data.binaryEnclosure)));
          if (writeEnclosure) {
            data.writer.write(data.binaryEnclosure);
          }
          data.writer.write(getBinaryString(fieldName));
          if (writeEnclosure) {
            data.writer.write(data.binaryEnclosure);
          }
        }
        data.writer.write(data.binaryNewline);
      } else if (r != null) {
        // Just put all field names in the header/footer
        for (int i = 0; i < r.size(); i++) {
          if (i > 0 && data.binarySeparator.length > 0) {
            data.writer.write(data.binarySeparator);
          }
          IValueMeta v = r.getValueMeta(i);
          boolean writeEnclosure =
              (meta.isEnclosureForced()
                      && data.binaryEnclosure.length > 0
                      && v != null
                      && v.isString())
                  || ((!meta.isEnclosureFixDisabled()
                      && containsSeparatorOrEnclosure(
                          v.getName().getBytes(), data.binarySeparator, data.binaryEnclosure)));
          if (writeEnclosure) {
            data.writer.write(data.binaryEnclosure);
          }
          data.writer.write(getBinaryString(v.getName()));
          if (writeEnclosure) {
            data.writer.write(data.binaryEnclosure);
          }
        }
        data.writer.write(data.binaryNewline);
      } else {
        data.writer.write(getBinaryString("no rows selected" + Const.CR));
      }
    } catch (Exception e) {
      logError("Error writing header line: " + e.toString());
      logError(Const.getStackTracker(e));
      retval = true;
    }
    incrementLinesOutput();
    return retval;
  }
  /**
   * Builds the concrete output filename (extension, copy/partition/split numbers, beam context)
   * by delegating to the metadata's filename builder.
   *
   * @param filename the base filename (already variable-resolved)
   * @param ziparchive whether the file goes into a zip archive
   * @return the fully constructed filename
   */
  public String buildFilename(String filename, boolean ziparchive) {
    return meta.buildFilename(
        filename,
        meta.getExtension(),
        this,
        getCopy(),
        getPartitionId(),
        data.splitnr,
        data.isBeamContext(),
        getLogChannelId(),
        data.getBeamBundleNr(),
        ziparchive,
        meta);
  }
protected boolean closeFile(String filename) {
try {
data.getFileStreamsCollection().closeFile(filename);
} catch (Exception e) {
logError("Exception trying to close file: " + e.toString());
setErrors(1);
return false;
}
return true;
}
protected boolean closeFile() {
boolean retval;
try {
if (data.writer != null) {
data.getFileStreamsCollection().closeStream(data.writer);
}
data.writer = null;
data.out = null;
data.fos = null;
if (isDebug()) {
logDebug("Closing normal file ...");
}
retval = true;
} catch (Exception e) {
logError("Exception trying to close file: " + e.toString());
setErrors(1);
// Clean resources
data.writer = null;
data.out = null;
data.fos = null;
retval = false;
}
return retval;
}
  /**
   * Initializes the transform: resets the split counter, optionally opens the first output file
   * (unless deferred to the first row or the filename comes from a field), and pre-computes the
   * binary separator/enclosure/newline representations.
   *
   * @return true when base initialization succeeded, false otherwise
   */
  @Override
  public boolean init() {
    if (super.init()) {
      data.splitnr = 0;
      // In case user want to create file at first row
      // In that case, DO NOT create file at Init
      if (!meta.isDoNotOpenNewFileInit() && !meta.isFileNameInField()) {
        try {
          initOutput();
        } catch (Exception e) {
          logError(
              "Couldn't open file "
                  + HopVfs.getFriendlyURI(getParentVariables().resolve(meta.getFileName()))
                  + "."
                  + getParentVariables().resolve(meta.getExtension()),
              e);
          setErrors(1L);
          stopAll();
        }
      }
      try {
        initBinaryDataFields();
      } catch (Exception e) {
        logError("Couldn't initialize binary data fields", e);
        setErrors(1L);
        stopAll();
      }
      return true;
    }
    return false;
  }
  /**
   * Resolves the configured output filename and opens the writer for it.
   *
   * @throws HopException when the filename cannot be determined or the stream cannot be opened
   */
  protected void initOutput() throws HopException {
    String filename = getOutputFileName(null);
    initFileStreamWriter(filename);
  }
protected void initBinaryDataFields() throws HopException {
try {
data.hasEncoding = !Utils.isEmpty(meta.getEncoding());
data.binarySeparator = new byte[] {};
data.binaryEnclosure = new byte[] {};
data.binaryNewline = new byte[] {};
if (data.hasEncoding) {
if (!Utils.isEmpty(meta.getSeparator())) {
data.binarySeparator = resolve(meta.getSeparator()).getBytes(meta.getEncoding());
}
if (!Utils.isEmpty(meta.getEnclosure())) {
data.binaryEnclosure = resolve(meta.getEnclosure()).getBytes(meta.getEncoding());
}
if (!Utils.isEmpty(meta.getNewline())) {
data.binaryNewline = meta.getNewline().getBytes(meta.getEncoding());
}
} else {
if (!Utils.isEmpty(meta.getSeparator())) {
data.binarySeparator = resolve(meta.getSeparator()).getBytes();
}
if (!Utils.isEmpty(meta.getEnclosure())) {
data.binaryEnclosure = resolve(meta.getEnclosure()).getBytes();
}
if (!Utils.isEmpty(meta.getNewline())) {
data.binaryNewline = resolve(meta.getNewline()).getBytes();
}
}
data.binaryNullValue = new byte[meta.getOutputFields().length][];
for (int i = 0; i < meta.getOutputFields().length; i++) {
data.binaryNullValue[i] = null;
String nullString = meta.getOutputFields()[i].getNullString();
if (!Utils.isEmpty(nullString)) {
if (data.hasEncoding) {
data.binaryNullValue[i] = nullString.getBytes(meta.getEncoding());
} else {
data.binaryNullValue[i] = nullString.getBytes();
}
}
}
data.splitEvery = meta.getSplitEvery(variables);
} catch (Exception e) {
throw new HopException("Unexpected error while encoding binary fields", e);
}
}
  /**
   * Flushes and closes all open output streams, unless output goes to the servlet (in which
   * case the container owns the stream's lifecycle).
   *
   * @throws IOException when flushing or closing fails
   */
  protected void close() throws IOException {
    if (!meta.isServletOutput()) {
      data.getFileStreamsCollection().flushOpenFiles(true);
      data.writer = null;
    }
  }
  /**
   * Releases all file resources at transform shutdown; close errors are logged and counted but
   * do not prevent disposal.
   */
  @Override
  public void dispose() {
    try {
      close();
    } catch (Exception e) {
      logError("Unexpected error closing file", e);
      setErrors(1);
    }
    // Drop stream references regardless of close outcome.
    data.writer = null;
    data.out = null;
    data.fos = null;
    super.dispose();
  }
public boolean containsSeparatorOrEnclosure(byte[] source, byte[] separator, byte[] enclosure) {
boolean result = false;
boolean enclosureExists = enclosure != null && enclosure.length > 0;
boolean separatorExists = separator != null && separator.length > 0;
// Skip entire test if neither separator nor enclosure exist
if (separatorExists || enclosureExists) {
// Search for the first occurrence of the separator or enclosure
for (int index = 0; !result && index < source.length; index++) {
if (enclosureExists && source[index] == enclosure[0]) {
// Potential match found, make sure there are enough bytes to support a full match
if (index + enclosure.length <= source.length) {
// First byte of enclosure found
result = true; // Assume match
for (int i = 1; i < enclosure.length; i++) {
if (source[index + i] != enclosure[i]) {
// Enclosure match is proven false
result = false;
break;
}
}
}
} else if (separatorExists
&& source[index] == separator[0]
&& index + separator.length <= source.length) {
// Potential match found, make sure there are enough bytes to support a full match
// First byte of separator found
result = true; // Assume match
for (int i = 1; i < separator.length; i++) {
if (source[index + i] != separator[i]) {
// Separator match is proven false
result = false;
break;
}
}
}
}
}
return result;
}
  /**
   * Ensures the parent folder of the given file exists, creating it when configured to do so.
   *
   * @param filename the output file whose parent folder is checked/created
   * @throws Exception when the parent folder is missing and creation is disabled, or when the
   *     file system cannot be accessed
   */
  private void createParentFolder(String filename) throws Exception {
    // Check for parent folder
    FileObject parentfolder = null;
    try {
      // Get parent folder
      parentfolder = getFileObject(filename, variables).getParent();
      if (parentfolder.exists()) {
        if (isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG,
                  "TextFileOutput.Log.ParentFolderExist",
                  HopVfs.getFriendlyURI(parentfolder)));
        }
      } else {
        if (isDetailed()) {
          logDetailed(
              BaseMessages.getString(
                  PKG,
                  "TextFileOutput.Log.ParentFolderNotExist",
                  HopVfs.getFriendlyURI(parentfolder)));
        }
        if (meta.isCreateParentFolder()) {
          parentfolder.createFolder();
          if (isDetailed()) {
            logDetailed(
                BaseMessages.getString(
                    PKG,
                    "TextFileOutput.Log.ParentFolderCreated",
                    HopVfs.getFriendlyURI(parentfolder)));
          }
        } else {
          throw new HopException(
              BaseMessages.getString(
                  PKG,
                  "TextFileOutput.Log.ParentFolderNotExistCreateIt",
                  HopVfs.getFriendlyURI(parentfolder),
                  HopVfs.getFriendlyURI(filename)));
        }
      }
    } finally {
      // Always release the VFS handle; close failures are irrelevant here.
      if (parentfolder != null) {
        try {
          parentfolder.close();
        } catch (Exception ex) {
          // Ignore
        }
      }
    }
  }
  /**
   * Resolves a VFS filename to a file object using this transform's variable space.
   * Protected so tests/subclasses can substitute file access.
   */
  protected FileObject getFileObject(String vfsFilename) throws HopFileException {
    return HopVfs.getFileObject(vfsFilename, variables);
  }
  /**
   * Resolves a VFS filename to a file object using the given variable space.
   * Protected so tests/subclasses can substitute file access.
   */
  protected FileObject getFileObject(String vfsFilename, IVariables variables)
      throws HopFileException {
    return HopVfs.getFileObject(vfsFilename, variables);
  }
  /**
   * Opens an output stream for a VFS filename, optionally in append mode.
   * Protected so tests/subclasses can substitute file access.
   */
  protected OutputStream getOutputStream(String vfsFilename, IVariables variables, boolean append)
      throws HopFileException {
    return HopVfs.getOutputStream(vfsFilename, append, variables);
  }
  /** Beam bundle start hook: no per-bundle setup is needed for file output. */
  @Override
  public void startBundle() {
    // Do Nothing
  }
  /**
   * Flushes and closes open files at the end of a batch, except when running in a Beam context
   * (there {@link #finishBundle()} handles closing).
   *
   * @throws HopException when closing the files fails
   */
  @Override
  public void batchComplete() throws HopException {
    if (!data.isBeamContext()) {
      try {
        close();
      } catch (IOException e) {
        throw new HopException("Error closing file(s)", e);
      }
    }
  }
  /**
   * Beam bundle finish hook: flushes and closes all open output files.
   *
   * @throws HopException when closing the files fails
   */
  @Override
  public void finishBundle() throws HopException {
    try {
      close();
    } catch (IOException e) {
      throw new HopException("Error closing file(s)", e);
    }
  }
}
|
googleapis/google-cloud-java | 36,772 | java-apigee-registry/proto-google-cloud-apigee-registry-v1/src/main/java/com/google/cloud/apigeeregistry/v1/ListArtifactsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apigeeregistry/v1/registry_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apigeeregistry.v1;
/**
*
*
* <pre>
* Response message for ListArtifacts.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListArtifactsResponse}
*/
public final class ListArtifactsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apigeeregistry.v1.ListArtifactsResponse)
ListArtifactsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListArtifactsResponse.newBuilder() to construct.
private ListArtifactsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListArtifactsResponse() {
artifacts_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListArtifactsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListArtifactsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListArtifactsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse.class,
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse.Builder.class);
}
public static final int ARTIFACTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.apigeeregistry.v1.Artifact> artifacts_;
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.apigeeregistry.v1.Artifact> getArtifactsList() {
return artifacts_;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.apigeeregistry.v1.ArtifactOrBuilder>
getArtifactsOrBuilderList() {
return artifacts_;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
@java.lang.Override
public int getArtifactsCount() {
return artifacts_.size();
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.Artifact getArtifacts(int index) {
return artifacts_.get(index);
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ArtifactOrBuilder getArtifactsOrBuilder(int index) {
return artifacts_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < artifacts_.size(); i++) {
output.writeMessage(1, artifacts_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < artifacts_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, artifacts_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.apigeeregistry.v1.ListArtifactsResponse)) {
return super.equals(obj);
}
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse other =
(com.google.cloud.apigeeregistry.v1.ListArtifactsResponse) obj;
if (!getArtifactsList().equals(other.getArtifactsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getArtifactsCount() > 0) {
hash = (37 * hash) + ARTIFACTS_FIELD_NUMBER;
hash = (53 * hash) + getArtifactsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListArtifacts.
* </pre>
*
* Protobuf type {@code google.cloud.apigeeregistry.v1.ListArtifactsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apigeeregistry.v1.ListArtifactsResponse)
com.google.cloud.apigeeregistry.v1.ListArtifactsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListArtifactsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListArtifactsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse.class,
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse.Builder.class);
}
// Construct using com.google.cloud.apigeeregistry.v1.ListArtifactsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (artifactsBuilder_ == null) {
artifacts_ = java.util.Collections.emptyList();
} else {
artifacts_ = null;
artifactsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apigeeregistry.v1.RegistryServiceProto
.internal_static_google_cloud_apigeeregistry_v1_ListArtifactsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListArtifactsResponse getDefaultInstanceForType() {
return com.google.cloud.apigeeregistry.v1.ListArtifactsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListArtifactsResponse build() {
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListArtifactsResponse buildPartial() {
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse result =
new com.google.cloud.apigeeregistry.v1.ListArtifactsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.apigeeregistry.v1.ListArtifactsResponse result) {
if (artifactsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
artifacts_ = java.util.Collections.unmodifiableList(artifacts_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.artifacts_ = artifacts_;
} else {
result.artifacts_ = artifactsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.apigeeregistry.v1.ListArtifactsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apigeeregistry.v1.ListArtifactsResponse) {
return mergeFrom((com.google.cloud.apigeeregistry.v1.ListArtifactsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apigeeregistry.v1.ListArtifactsResponse other) {
if (other == com.google.cloud.apigeeregistry.v1.ListArtifactsResponse.getDefaultInstance())
return this;
if (artifactsBuilder_ == null) {
if (!other.artifacts_.isEmpty()) {
if (artifacts_.isEmpty()) {
artifacts_ = other.artifacts_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureArtifactsIsMutable();
artifacts_.addAll(other.artifacts_);
}
onChanged();
}
} else {
if (!other.artifacts_.isEmpty()) {
if (artifactsBuilder_.isEmpty()) {
artifactsBuilder_.dispose();
artifactsBuilder_ = null;
artifacts_ = other.artifacts_;
bitField0_ = (bitField0_ & ~0x00000001);
artifactsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getArtifactsFieldBuilder()
: null;
} else {
artifactsBuilder_.addAllMessages(other.artifacts_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.apigeeregistry.v1.Artifact m =
input.readMessage(
com.google.cloud.apigeeregistry.v1.Artifact.parser(), extensionRegistry);
if (artifactsBuilder_ == null) {
ensureArtifactsIsMutable();
artifacts_.add(m);
} else {
artifactsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.apigeeregistry.v1.Artifact> artifacts_ =
java.util.Collections.emptyList();
private void ensureArtifactsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
artifacts_ =
new java.util.ArrayList<com.google.cloud.apigeeregistry.v1.Artifact>(artifacts_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.apigeeregistry.v1.Artifact,
com.google.cloud.apigeeregistry.v1.Artifact.Builder,
com.google.cloud.apigeeregistry.v1.ArtifactOrBuilder>
artifactsBuilder_;
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public java.util.List<com.google.cloud.apigeeregistry.v1.Artifact> getArtifactsList() {
if (artifactsBuilder_ == null) {
return java.util.Collections.unmodifiableList(artifacts_);
} else {
return artifactsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public int getArtifactsCount() {
if (artifactsBuilder_ == null) {
return artifacts_.size();
} else {
return artifactsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public com.google.cloud.apigeeregistry.v1.Artifact getArtifacts(int index) {
if (artifactsBuilder_ == null) {
return artifacts_.get(index);
} else {
return artifactsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder setArtifacts(int index, com.google.cloud.apigeeregistry.v1.Artifact value) {
if (artifactsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureArtifactsIsMutable();
artifacts_.set(index, value);
onChanged();
} else {
artifactsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder setArtifacts(
int index, com.google.cloud.apigeeregistry.v1.Artifact.Builder builderForValue) {
if (artifactsBuilder_ == null) {
ensureArtifactsIsMutable();
artifacts_.set(index, builderForValue.build());
onChanged();
} else {
artifactsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder addArtifacts(com.google.cloud.apigeeregistry.v1.Artifact value) {
if (artifactsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureArtifactsIsMutable();
artifacts_.add(value);
onChanged();
} else {
artifactsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder addArtifacts(int index, com.google.cloud.apigeeregistry.v1.Artifact value) {
if (artifactsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureArtifactsIsMutable();
artifacts_.add(index, value);
onChanged();
} else {
artifactsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder addArtifacts(
com.google.cloud.apigeeregistry.v1.Artifact.Builder builderForValue) {
if (artifactsBuilder_ == null) {
ensureArtifactsIsMutable();
artifacts_.add(builderForValue.build());
onChanged();
} else {
artifactsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder addArtifacts(
int index, com.google.cloud.apigeeregistry.v1.Artifact.Builder builderForValue) {
if (artifactsBuilder_ == null) {
ensureArtifactsIsMutable();
artifacts_.add(index, builderForValue.build());
onChanged();
} else {
artifactsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder addAllArtifacts(
java.lang.Iterable<? extends com.google.cloud.apigeeregistry.v1.Artifact> values) {
if (artifactsBuilder_ == null) {
ensureArtifactsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, artifacts_);
onChanged();
} else {
artifactsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder clearArtifacts() {
if (artifactsBuilder_ == null) {
artifacts_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
artifactsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public Builder removeArtifacts(int index) {
if (artifactsBuilder_ == null) {
ensureArtifactsIsMutable();
artifacts_.remove(index);
onChanged();
} else {
artifactsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public com.google.cloud.apigeeregistry.v1.Artifact.Builder getArtifactsBuilder(int index) {
return getArtifactsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public com.google.cloud.apigeeregistry.v1.ArtifactOrBuilder getArtifactsOrBuilder(int index) {
if (artifactsBuilder_ == null) {
return artifacts_.get(index);
} else {
return artifactsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public java.util.List<? extends com.google.cloud.apigeeregistry.v1.ArtifactOrBuilder>
getArtifactsOrBuilderList() {
if (artifactsBuilder_ != null) {
return artifactsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(artifacts_);
}
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public com.google.cloud.apigeeregistry.v1.Artifact.Builder addArtifactsBuilder() {
return getArtifactsFieldBuilder()
.addBuilder(com.google.cloud.apigeeregistry.v1.Artifact.getDefaultInstance());
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public com.google.cloud.apigeeregistry.v1.Artifact.Builder addArtifactsBuilder(int index) {
return getArtifactsFieldBuilder()
.addBuilder(index, com.google.cloud.apigeeregistry.v1.Artifact.getDefaultInstance());
}
/**
*
*
* <pre>
* The artifacts from the specified publisher.
* </pre>
*
* <code>repeated .google.cloud.apigeeregistry.v1.Artifact artifacts = 1;</code>
*/
public java.util.List<com.google.cloud.apigeeregistry.v1.Artifact.Builder>
getArtifactsBuilderList() {
return getArtifactsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.apigeeregistry.v1.Artifact,
com.google.cloud.apigeeregistry.v1.Artifact.Builder,
com.google.cloud.apigeeregistry.v1.ArtifactOrBuilder>
getArtifactsFieldBuilder() {
if (artifactsBuilder_ == null) {
artifactsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.apigeeregistry.v1.Artifact,
com.google.cloud.apigeeregistry.v1.Artifact.Builder,
com.google.cloud.apigeeregistry.v1.ArtifactOrBuilder>(
artifacts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
artifacts_ = null;
}
return artifactsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apigeeregistry.v1.ListArtifactsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.apigeeregistry.v1.ListArtifactsResponse)
private static final com.google.cloud.apigeeregistry.v1.ListArtifactsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.apigeeregistry.v1.ListArtifactsResponse();
}
public static com.google.cloud.apigeeregistry.v1.ListArtifactsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListArtifactsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListArtifactsResponse>() {
@java.lang.Override
public ListArtifactsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListArtifactsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListArtifactsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.apigeeregistry.v1.ListArtifactsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/usergrid | 36,805 | stack/query-validator/src/test/java/org/apache/usergrid/query/validator/users/UserQueryIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.query.validator.users;
import net.jcip.annotations.NotThreadSafe;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.apache.usergrid.persistence.Entity;
import org.apache.usergrid.query.validator.AbstractQueryIT;
import org.apache.usergrid.query.validator.QueryRequest;
import org.apache.usergrid.query.validator.QueryResponse;
import org.apache.usergrid.query.validator.QueryResultsMatcher;
import org.apache.usergrid.utils.StringUtils;
import java.util.List;
/**
* @author Sungju Jin
*/
@NotThreadSafe
public class UserQueryIT extends AbstractQueryIT {
@BeforeClass
public static void setDatas() throws InterruptedException{
createInitializationDatas("user");
}
@Test
public void sexEqualAndNameEqual() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and name = 'judekim' LIMIT 10";
String api = "SELECT * WHERE sex = 'male' AND name = 'judekim'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void nameEqualAndSexEqual() {
String sqlite = "SELECT * FROM users WHERE name = 'judekim' and sex = 'male' LIMIT 10";
String api = "select * where name = 'judekim' and sex = 'male'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void nameEqualAndSexEqual_limitL20() {
String sqlite = "SELECT * FROM users WHERE name = 'judekim' and sex = 'male' LIMIT 20";
String api = "select * where name = 'judekim' and sex = 'male'";
int limit = 20;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndNameEqualExtra1() {
String sqlite = "SELECT * FROM users WHERE sex = 'female' and name = 'curioe' LIMIT 10";
String api = "select * where sex = 'female' and name = 'curioe'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualOrNameEqual() {
String sqlite = "SELECT * FROM users WHERE sex = 'female' or name = 'curioe' ORDER BY created DESC LIMIT 10";
String api = "select * where sex = 'female' or name = 'curioe' order by created desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
@Ignore("TODO: uncomment this test when you are ready to fix USERGRID-1314")
public void nameBeginswithAndSexEqualAndAgeGreaterthanequalOrSexEqual_sortNameDesc() {
String sqlite = "SELECT * FROM users WHERE name LIKE 'a%' and sex = 'male' and age >= 35 or sex = 'female' " +
"ORDER BY name desc LIMIT 10";
String api = "select * where name = 'a*' and sex = 'male' and age >= 35 or sex = 'female' " +
"order by name desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
@Ignore("TODO: uncomment this test when you are ready to fix USERGRID-1314")
public void nameBeginswithAndSexEqualAndAgeGreaterthanequalOrSexEqual_sortAddressAscNameDesc() {
String sqlite = "SELECT * FROM users WHERE name LIKE 'a%' and sex = 'male' and age >= 35 or sex = 'female' " +
"ORDER BY address asc, name desc LIMIT 4";
String api = "select * where name = 'a*' and sex = 'male' and age >= 35 or sex = 'female' " +
"order by address asc, name desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(4);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
@Ignore("TODO: uncomment this test when you are ready to fix USERGRID-1314")
public void nameBeginswithAndSexEqualAndAgeGreaterthanequalOrSexEqual_sortAddressAscNameDesc_limitL4() {
String sqlite = "SELECT * FROM users WHERE name LIKE 'a%' and sex = 'male' and age >= 35 or sex = 'female' " +
"ORDER BY address asc, name desc LIMIT 4";
String api = "select * where name = 'a*' and sex = 'male' and age >= 35 or sex = 'female' " +
"order by address asc, name desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(4);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqual_sortAgeDescExtra1() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' ORDER BY age desc LIMIT 10";
String api = "select * where sex = 'male' order by age desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
String sex = (String)entity.getProperty("sex");
if((StringUtils.equals("male",sex)) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqual_sortAgeDescExtra2() {
String sqlite = " SELECT * FROM users WHERE sex = 'female' ORDER BY age desc LIMIT 10";
String api = "select * where sex = 'female' order by age desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequal() {
String sqlite = " SELECT * FROM users WHERE sex = 'male' and age >= 35 ORDER BY created DESC LIMIT 10";
String api = "select * where sex = 'male' and age >= 35 order by created desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequal_sortAgeDesc() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 ORDER BY age desc LIMIT 10";
String api = "select * where sex = 'male' and age >= 35 order by age desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequal_sortNameDesc() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 ORDER BY name desc LIMIT 10";
String api = "select * where sex = 'male' and age >= 35 order by name desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequal_sortAgeDesc_limitL20() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 ORDER BY age desc LIMIT 20";
String api = "select * where sex = 'male' and age >= 35 order by age desc";
int limit = 20;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlSexEqualAndAgeGreaterthanequal_sortNameDesc_limitL20() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 ORDER BY name desc LIMIT 20";
String api = "select * where sex = 'male' and age >= 35 order by name desc";
int limit = 20;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequalOrSexEqual() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 or sex = 'female' LIMIT 10";
String api = "select * where sex = 'male' and age >= 35 or sex = 'female'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
String sex = (String)entity.getProperty("sex");
if(((StringUtils.equals("male",sex) && age >= 35) || StringUtils.equals("female",sex)) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequalOrSexEqual_sortAgeDesc() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 or sex = 'female' " +
"ORDER BY age desc LIMIT 10";
String api = "select * where sex = 'male' and age >= 35 or sex = 'female' order by age desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void limitL12() {
String sqlite = "SELECT * FROM users order by created desc LIMIT 12";
String api = null;
int limit = 12;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequalOrSexEqual_sortNameDesc() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 or sex = 'female' " +
"ORDER BY name desc LIMIT 10";
String api = "select * where sex = 'male' and age >= 35 or sex = 'female' order by name desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequalOrSexEqual_sortNameDesc_limitL20() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 or sex = 'female' " +
"ORDER BY name desc LIMIT 20";
String api = "select * where sex = 'male' and age >= 35 or sex = 'female' order by name desc";
int limit = 20;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void limitL11() {
String sqlite = "SELECT * FROM users order by created desc LIMIT 11";
String api = null;
int limit = 11;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
@Ignore("TODO: uncomment this test when you are ready to fix USERGRID-1314")
public void nameBeginswithAndSexEqualAndAgeGreaterthanequalOrSexEqual() {
String sqlite = "SELECT * FROM users WHERE name LIKE 'a%' and sex = 'male' and age >= 20 " +
"or sex = 'female' LIMIT 10";
String api = "select * where name = 'a*' and sex = 'male' and age >= 20 or sex = 'female'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
@Ignore("TODO: uncomment this test when you are ready to fix USERGRID-1314")
public void nameBeginswithAndSexEqualAndAgeGreaterthanequalOrSexEqual_limitL20() {
String sqlite = "SELECT * FROM users WHERE name LIKE 'a%' and sex = 'male' and age >= 20 " +
"or sex = 'female' LIMIT 20";
String api = "select * where name = 'a*' and sex = 'male' and age >= 20 or sex = 'female'";
int limit = 20;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sexEqualAndAgeGreaterthanequal_sortAddressDesc_limitL100() {
String sqlite = "SELECT * FROM users WHERE sex = 'male' and age >= 35 ORDER BY address desc LIMIT 100";
String api = "select * where sex = 'male' and age >= 35 order by address desc";
int limit = 100;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlNameEqual() {
String sqlite = "SELECT * FROM users WHERE name = 'judekim' LIMIT 10";
String api = "select * where name = 'judekim'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAgeEqual() {
String sqlite = "SELECT * FROM users WHERE age = 16 LIMIT 10";
String api = "select * where age = 16";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlNameEqualAndAgeEqual() {
String sqlite = "SELECT * FROM users WHERE name = 'askagirl' and age = 16 LIMIT 10";
String api = "select * where name = 'askagirl' and age = 16";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
String name = entity.getName();
int age = (Integer)entity.getProperty("age");
if ((StringUtils.equals("askagirl", name) && age == 16) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAgeLessthan() {
String sqlite = "SELECT * FROM users WHERE age < 16 LIMIT 10";
String api = "select * where age < 16";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
if((age < 16) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAgeLessthanequal() {
String sqlite = "SELECT * FROM users WHERE age <= 16 LIMIT 20";
String api = "select * where age <= 16";
int limit = 20;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
if((age <= 16) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAgeGreaterthan() {
String sqlite = "SELECT * FROM users WHERE age > 16 LIMIT 10";
String api = "select * where age > 16";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
if((age > 16) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAgeGreaterthanequal() {
String sqlite = "SELECT * FROM users WHERE age >= 16 LIMIT 10";
String api = "select * where age >= 16";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
if((age >= 16) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAgeGreaterthanequalAndAgeLessthan() {
String sqlite = "SELECT * FROM users WHERE age >= 32 and age < 40 LIMIT 10";
String api = "select * where age >= 32 and age < 40";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
if((age >= 32 && age < 40) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAgeGreaterthanequalAndAgeLessthanAndSexEqual() {
String sqlite = "SELECT * FROM users WHERE age >= 32 and age < 40 and sex = 'female' LIMIT 10";
String api = "select * where age >= 32 and age < 40 and sex = 'female'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
String sex = (String)entity.getProperty("sex");
if((age >= 32 && age < 40 && StringUtils.equals("female",sex)) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAddressFulltext() {
String sqlite = "SELECT * FROM users WHERE address LIKE '%서울시%' LIMIT 10";
String api = "select * where address contains '서울시'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
String address = (String)entity.getProperty("address");
if((StringUtils.contains(address,"서울시")) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAddressFulltextstartswith() {
String sqlite = " SELECT * FROM users WHERE address LIKE 'A%' LIMIT 10";
String api = "select * where address contains 'A*'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
String address = (String)entity.getProperty("address");
if((StringUtils.startsWith(address,"A")) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAddressBeginswith() {
String sqlite = "SELECT * FROM users WHERE address LIKE 'B%' LIMIT 10";
String api = "select * where address = 'B*'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
String address = (String)entity.getProperty("address");
if((StringUtils.startsWith(address,"B")) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAddressBeginswithAndSexFulltextstartswith() {
String sqlite = "SELECT * FROM users WHERE address LIKE 'C%' and sex LIKE 'ma%' LIMIT 10";
String api = "select * where address = 'C*' and sex contains 'ma*'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
String address = (String)entity.getProperty("address");
if((StringUtils.startsWith(address,"C")) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlAddressBeginswithAndSexFulltext() {
String sqlite = "SELECT * FROM users WHERE (address LIKE 'D%' and sex LIKE '%male%') LIMIT 10";
String api = "select * where address = 'D*' and sex contains 'male'";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
String address = (String)entity.getProperty("address");
if((StringUtils.startsWith(address,"D")) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void qlSexEqualOrAgeGreaterthanequalAndAgeLessthan_limitL20() {
String sqlite = "SELECT * FROM users WHERE sex = 'female' or age >= 12 and age < 20 LIMIT 20";
String api = "select * where sex = 'female' or age >= 12 and age < 20";
int limit = 20;
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
request.getApiQuery().setLimit(limit);
QueryResponse response = validator.execute(request, new QueryResultsMatcher() {
@Override
public boolean equals(List<Entity> expectedEntities, List<Entity> actuallyEntities) {
boolean equals = expectedEntities.size() == expectedEntities.size();
if( !equals )
return false;
for(Entity entity : actuallyEntities) {
int age = (Integer)entity.getProperty("age");
String sex = (String)entity.getProperty("sex");
if(((StringUtils.equals("female",sex) || age >= 12) && age < 20) == false) {
return false;
}
}
return equals;
}
});
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortNameAsc() {
String sqlite = "SELECT * FROM users ORDER BY name asc LIMIT 10";
String api = "select * order by name asc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortNameDesc() {
String sqlite = "SELECT * FROM users ORDER BY name desc LIMIT 10";
String api = "select * order by name desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortNameAscAgeDesc() {
String sqlite = "SELECT * FROM users ORDER BY name asc, age desc LIMIT 10";
String api = "select * order by name asc, age desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortNameAscAddressAsc() {
String sqlite = "SELECT * FROM users ORDER BY name asc, address asc LIMIT 10";
String api = "select * order by name asc, address asc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortCreatedAsc() {
String sqlite = "SELECT * FROM users ORDER BY created asc LIMIT 10";
String api = "select * order by created asc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortCreatedDesc() {
String sqlite = "SELECT * FROM users ORDER BY created desc LIMIT 10";
String api = "select * order by created desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortModifiedAsc() {
String sqlite = "SELECT * FROM users ORDER BY modified asc LIMIT 10";
String api = "select * order by modified asc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortCreatedDescNameAsc() {
String sqlite = "SELECT * FROM users ORDER BY created desc, name asc LIMIT 10";
String api = "select * order by created desc, name asc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
@Test
public void sortNameAscCreatedDesc() {
String sqlite = "SELECT * FROM users ORDER BY name asc, created desc LIMIT 10";
String api = "select * order by name asc, created desc";
QueryRequest request = new QueryRequest();
request.setDbQuery(sqlite);
request.getApiQuery().setQuery(api);
QueryResponse response = validator.execute(request);
Assert.assertTrue(response.toString(), response.result());
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.bigquery.model;
/**
* Model definition for Dataset.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the BigQuery API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Dataset extends com.google.api.client.json.GenericJson {
/**
* [Optional] An array of objects that define dataset access for one or more entities. You can set
* this property when inserting or updating a dataset in order to control who is allowed to access
* the data. If unspecified at dataset creation time, BigQuery adds default dataset access for the
* following entities: access.specialGroup: projectReaders; access.role: READER;
* access.specialGroup: projectWriters; access.role: WRITER; access.specialGroup: projectOwners;
* access.role: OWNER; access.userByEmail: [dataset creator email]; access.role: OWNER;
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<Access> access;
static {
// hack to force ProGuard to consider Access used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(Access.class);
}
/**
* [Output-only] The time when this dataset was created, in milliseconds since the epoch.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long creationTime;
/**
* [Required] A reference that identifies the dataset.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DatasetReference datasetReference;
/**
* [Output-only] The default collation of the dataset.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String defaultCollation;
/**
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private EncryptionConfiguration defaultEncryptionConfiguration;
/**
* [Optional] The default partition expiration for all partitioned tables in the dataset, in
* milliseconds. Once this property is set, all newly-created partitioned tables in the dataset
* will have an expirationMs property in the timePartitioning settings set to this value, and
* changing the value will only affect new tables, not existing ones. The storage in a partition
* will have an expiration time of its partition time plus this value. Setting this property
* overrides the use of defaultTableExpirationMs for partitioned tables: only one of
* defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned
* table. If you provide an explicit timePartitioning.expirationMs when creating or updating a
* partitioned table, that value takes precedence over the default partition expiration time
* indicated by this property.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long defaultPartitionExpirationMs;
/**
* [Optional] The default lifetime of all tables in the dataset, in milliseconds. The minimum
* value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables
* in the dataset will have an expirationTime property set to the creation time plus the value in
* this property, and changing the value will only affect new tables, not existing ones. When the
* expirationTime for a given table is reached, that table will be deleted automatically. If a
* table's expirationTime is modified or removed before the table expires, or if you provide an
* explicit expirationTime when creating a table, that value takes precedence over the default
* expiration time indicated by this property.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long defaultTableExpirationMs;
/**
* [Optional] A user-friendly description of the dataset.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* [Output-only] A hash of the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String etag;
/**
* [Optional] A descriptive name for the dataset.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String friendlyName;
/**
* [Output-only] The fully-qualified unique name of the dataset in the format projectId:datasetId.
* The dataset name without the project name is given in the datasetId field. When creating a new
* dataset, leave this field blank, and instead specify the datasetId field.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String id;
  /**
   * [Optional] Indicates if table names are case insensitive in the dataset.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean isCaseInsensitive;
  /**
   * [Output-only] The resource type.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;
  /**
   * The labels associated with this dataset. You can use these to organize and group your datasets.
   * You can set this property when inserting or updating a dataset. See Creating and Updating
   * Dataset Labels for more information.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> labels;
  /**
   * [Output-only] The date when this dataset or any of its tables was last modified, in
   * milliseconds since the epoch.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long lastModifiedTime;
  /**
   * The geographic location where the dataset should reside. The default value is US. See details
   * at https://cloud.google.com/bigquery/docs/locations.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String location;
  /**
   * [Optional] Number of hours for the max time travel for all tables in the dataset.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long maxTimeTravelHours;
  /**
   * [Output-only] Reserved for future use.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean satisfiesPzs;
  /**
   * [Output-only] A URL that can be used to access the resource again. You can use this URL in Get
   * or Update requests to the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;
  /**
   * [Optional] The tags associated with this dataset. Tag keys are globally unique.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Tags> tags;
  static {
    // hack to force ProGuard to consider Tags used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(Tags.class);
  }
  /**
   * [Optional] An array of objects that define dataset access for one or more entities. You can set
   * this property when inserting or updating a dataset in order to control who is allowed to access
   * the data. If unspecified at dataset creation time, BigQuery adds default dataset access for the
   * following entities: access.specialGroup: projectReaders; access.role: READER;
   * access.specialGroup: projectWriters; access.role: WRITER; access.specialGroup: projectOwners;
   * access.role: OWNER; access.userByEmail: [dataset creator email]; access.role: OWNER;
   * @return value or {@code null} for none
   */
  public java.util.List<Access> getAccess() {
    return access;
  }
  /**
   * [Optional] An array of objects that define dataset access for one or more entities. You can set
   * this property when inserting or updating a dataset in order to control who is allowed to access
   * the data. If unspecified at dataset creation time, BigQuery adds default dataset access for the
   * following entities: access.specialGroup: projectReaders; access.role: READER;
   * access.specialGroup: projectWriters; access.role: WRITER; access.specialGroup: projectOwners;
   * access.role: OWNER; access.userByEmail: [dataset creator email]; access.role: OWNER;
   * @param access access or {@code null} for none
   */
  public Dataset setAccess(java.util.List<Access> access) {
    this.access = access;
    return this;
  }
  /**
   * [Output-only] The time when this dataset was created, in milliseconds since the epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getCreationTime() {
    return creationTime;
  }
  /**
   * [Output-only] The time when this dataset was created, in milliseconds since the epoch.
   * @param creationTime creationTime or {@code null} for none
   */
  public Dataset setCreationTime(java.lang.Long creationTime) {
    this.creationTime = creationTime;
    return this;
  }
  /**
   * [Required] A reference that identifies the dataset.
   * @return value or {@code null} for none
   */
  public DatasetReference getDatasetReference() {
    return datasetReference;
  }
  /**
   * [Required] A reference that identifies the dataset.
   * @param datasetReference datasetReference or {@code null} for none
   */
  public Dataset setDatasetReference(DatasetReference datasetReference) {
    this.datasetReference = datasetReference;
    return this;
  }
  /**
   * [Output-only] The default collation of the dataset.
   * @return value or {@code null} for none
   */
  public java.lang.String getDefaultCollation() {
    return defaultCollation;
  }
  /**
   * [Output-only] The default collation of the dataset.
   * @param defaultCollation defaultCollation or {@code null} for none
   */
  public Dataset setDefaultCollation(java.lang.String defaultCollation) {
    this.defaultCollation = defaultCollation;
    return this;
  }
  /**
   * Default encryption configuration for the dataset's tables — description inferred from the
   * field name; confirm against the BigQuery Dataset REST reference.
   * @return value or {@code null} for none
   */
  public EncryptionConfiguration getDefaultEncryptionConfiguration() {
    return defaultEncryptionConfiguration;
  }
  /**
   * @param defaultEncryptionConfiguration defaultEncryptionConfiguration or {@code null} for none
   */
  public Dataset setDefaultEncryptionConfiguration(EncryptionConfiguration defaultEncryptionConfiguration) {
    this.defaultEncryptionConfiguration = defaultEncryptionConfiguration;
    return this;
  }
  /**
   * [Optional] The default partition expiration for all partitioned tables in the dataset, in
   * milliseconds. Once this property is set, all newly-created partitioned tables in the dataset
   * will have an expirationMs property in the timePartitioning settings set to this value, and
   * changing the value will only affect new tables, not existing ones. The storage in a partition
   * will have an expiration time of its partition time plus this value. Setting this property
   * overrides the use of defaultTableExpirationMs for partitioned tables: only one of
   * defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned
   * table. If you provide an explicit timePartitioning.expirationMs when creating or updating a
   * partitioned table, that value takes precedence over the default partition expiration time
   * indicated by this property.
   * @return value or {@code null} for none
   */
  public java.lang.Long getDefaultPartitionExpirationMs() {
    return defaultPartitionExpirationMs;
  }
  /**
   * [Optional] The default partition expiration for all partitioned tables in the dataset, in
   * milliseconds. Once this property is set, all newly-created partitioned tables in the dataset
   * will have an expirationMs property in the timePartitioning settings set to this value, and
   * changing the value will only affect new tables, not existing ones. The storage in a partition
   * will have an expiration time of its partition time plus this value. Setting this property
   * overrides the use of defaultTableExpirationMs for partitioned tables: only one of
   * defaultTableExpirationMs and defaultPartitionExpirationMs will be used for any new partitioned
   * table. If you provide an explicit timePartitioning.expirationMs when creating or updating a
   * partitioned table, that value takes precedence over the default partition expiration time
   * indicated by this property.
   * @param defaultPartitionExpirationMs defaultPartitionExpirationMs or {@code null} for none
   */
  public Dataset setDefaultPartitionExpirationMs(java.lang.Long defaultPartitionExpirationMs) {
    this.defaultPartitionExpirationMs = defaultPartitionExpirationMs;
    return this;
  }
  /**
   * [Optional] The default lifetime of all tables in the dataset, in milliseconds. The minimum
   * value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables
   * in the dataset will have an expirationTime property set to the creation time plus the value in
   * this property, and changing the value will only affect new tables, not existing ones. When the
   * expirationTime for a given table is reached, that table will be deleted automatically. If a
   * table's expirationTime is modified or removed before the table expires, or if you provide an
   * explicit expirationTime when creating a table, that value takes precedence over the default
   * expiration time indicated by this property.
   * @return value or {@code null} for none
   */
  public java.lang.Long getDefaultTableExpirationMs() {
    return defaultTableExpirationMs;
  }
  /**
   * [Optional] The default lifetime of all tables in the dataset, in milliseconds. The minimum
   * value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables
   * in the dataset will have an expirationTime property set to the creation time plus the value in
   * this property, and changing the value will only affect new tables, not existing ones. When the
   * expirationTime for a given table is reached, that table will be deleted automatically. If a
   * table's expirationTime is modified or removed before the table expires, or if you provide an
   * explicit expirationTime when creating a table, that value takes precedence over the default
   * expiration time indicated by this property.
   * @param defaultTableExpirationMs defaultTableExpirationMs or {@code null} for none
   */
  public Dataset setDefaultTableExpirationMs(java.lang.Long defaultTableExpirationMs) {
    this.defaultTableExpirationMs = defaultTableExpirationMs;
    return this;
  }
  /**
   * [Optional] A user-friendly description of the dataset.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }
  /**
   * [Optional] A user-friendly description of the dataset.
   * @param description description or {@code null} for none
   */
  public Dataset setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }
  /**
   * [Output-only] A hash of the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getEtag() {
    return etag;
  }
  /**
   * [Output-only] A hash of the resource.
   * @param etag etag or {@code null} for none
   */
  public Dataset setEtag(java.lang.String etag) {
    this.etag = etag;
    return this;
  }
  /**
   * [Optional] A descriptive name for the dataset.
   * @return value or {@code null} for none
   */
  public java.lang.String getFriendlyName() {
    return friendlyName;
  }
  /**
   * [Optional] A descriptive name for the dataset.
   * @param friendlyName friendlyName or {@code null} for none
   */
  public Dataset setFriendlyName(java.lang.String friendlyName) {
    this.friendlyName = friendlyName;
    return this;
  }
  /**
   * [Output-only] The fully-qualified unique name of the dataset in the format projectId:datasetId.
   * The dataset name without the project name is given in the datasetId field. When creating a new
   * dataset, leave this field blank, and instead specify the datasetId field.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }
  /**
   * [Output-only] The fully-qualified unique name of the dataset in the format projectId:datasetId.
   * The dataset name without the project name is given in the datasetId field. When creating a new
   * dataset, leave this field blank, and instead specify the datasetId field.
   * @param id id or {@code null} for none
   */
  public Dataset setId(java.lang.String id) {
    this.id = id;
    return this;
  }
  /**
   * [Optional] Indicates if table names are case insensitive in the dataset.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getIsCaseInsensitive() {
    return isCaseInsensitive;
  }
  /**
   * [Optional] Indicates if table names are case insensitive in the dataset.
   * @param isCaseInsensitive isCaseInsensitive or {@code null} for none
   */
  public Dataset setIsCaseInsensitive(java.lang.Boolean isCaseInsensitive) {
    this.isCaseInsensitive = isCaseInsensitive;
    return this;
  }
  /**
   * [Output-only] The resource type.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }
  /**
   * [Output-only] The resource type.
   * @param kind kind or {@code null} for none
   */
  public Dataset setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }
  /**
   * The labels associated with this dataset. You can use these to organize and group your datasets.
   * You can set this property when inserting or updating a dataset. See Creating and Updating
   * Dataset Labels for more information.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.String> getLabels() {
    return labels;
  }
  /**
   * The labels associated with this dataset. You can use these to organize and group your datasets.
   * You can set this property when inserting or updating a dataset. See Creating and Updating
   * Dataset Labels for more information.
   * @param labels labels or {@code null} for none
   */
  public Dataset setLabels(java.util.Map<String, java.lang.String> labels) {
    this.labels = labels;
    return this;
  }
  /**
   * [Output-only] The date when this dataset or any of its tables was last modified, in
   * milliseconds since the epoch.
   * @return value or {@code null} for none
   */
  public java.lang.Long getLastModifiedTime() {
    return lastModifiedTime;
  }
  /**
   * [Output-only] The date when this dataset or any of its tables was last modified, in
   * milliseconds since the epoch.
   * @param lastModifiedTime lastModifiedTime or {@code null} for none
   */
  public Dataset setLastModifiedTime(java.lang.Long lastModifiedTime) {
    this.lastModifiedTime = lastModifiedTime;
    return this;
  }
  /**
   * The geographic location where the dataset should reside. The default value is US. See details
   * at https://cloud.google.com/bigquery/docs/locations.
   * @return value or {@code null} for none
   */
  public java.lang.String getLocation() {
    return location;
  }
  /**
   * The geographic location where the dataset should reside. The default value is US. See details
   * at https://cloud.google.com/bigquery/docs/locations.
   * @param location location or {@code null} for none
   */
  public Dataset setLocation(java.lang.String location) {
    this.location = location;
    return this;
  }
  /**
   * [Optional] Number of hours for the max time travel for all tables in the dataset.
   * @return value or {@code null} for none
   */
  public java.lang.Long getMaxTimeTravelHours() {
    return maxTimeTravelHours;
  }
  /**
   * [Optional] Number of hours for the max time travel for all tables in the dataset.
   * @param maxTimeTravelHours maxTimeTravelHours or {@code null} for none
   */
  public Dataset setMaxTimeTravelHours(java.lang.Long maxTimeTravelHours) {
    this.maxTimeTravelHours = maxTimeTravelHours;
    return this;
  }
  /**
   * [Output-only] Reserved for future use.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getSatisfiesPzs() {
    return satisfiesPzs;
  }
  /**
   * [Output-only] Reserved for future use.
   * @param satisfiesPzs satisfiesPzs or {@code null} for none
   */
  public Dataset setSatisfiesPzs(java.lang.Boolean satisfiesPzs) {
    this.satisfiesPzs = satisfiesPzs;
    return this;
  }
  /**
   * [Output-only] A URL that can be used to access the resource again. You can use this URL in Get
   * or Update requests to the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLink() {
    return selfLink;
  }
  /**
   * [Output-only] A URL that can be used to access the resource again. You can use this URL in Get
   * or Update requests to the resource.
   * @param selfLink selfLink or {@code null} for none
   */
  public Dataset setSelfLink(java.lang.String selfLink) {
    this.selfLink = selfLink;
    return this;
  }
  /**
   * [Optional] The tags associated with this dataset. Tag keys are globally unique.
   * @return value or {@code null} for none
   */
  public java.util.List<Tags> getTags() {
    return tags;
  }
  /**
   * [Optional] The tags associated with this dataset. Tag keys are globally unique.
   * @param tags tags or {@code null} for none
   */
  public Dataset setTags(java.util.List<Tags> tags) {
    this.tags = tags;
    return this;
  }
  @Override
  public Dataset set(String fieldName, Object value) {
    return (Dataset) super.set(fieldName, value);
  }
  @Override
  public Dataset clone() {
    return (Dataset) super.clone();
  }
  /**
   * Model definition for DatasetAccess.
   *
   * <p>One access-control entry on a dataset. Per the field documentation below, exactly one of
   * the members marked "[Pick one]" should be set for a given entry.</p>
   */
  public static final class Access extends com.google.api.client.json.GenericJson {
    /**
     * [Pick one] A grant authorizing all resources of a particular type in a particular dataset
     * access to this dataset. Only views are supported for now. The role field is not required when
     * this field is set. If that dataset is deleted and re-created, its access needs to be granted
     * again via an update operation.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private DatasetAccessEntry dataset;
    /**
     * [Pick one] A domain to grant access to. Any users signed in with the domain specified will be
     * granted the specified access. Example: "example.com". Maps to IAM policy member
     * "domain:DOMAIN".
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String domain;
    /**
     * [Pick one] An email address of a Google Group to grant access to. Maps to IAM policy member
     * "group:GROUP".
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String groupByEmail;
    /**
     * [Pick one] Some other type of member that appears in the IAM Policy but isn't a user, group,
     * domain, or special group.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String iamMember;
    /**
     * [Required] An IAM role ID that should be granted to the user, group, or domain specified in
     * this access entry. The following legacy mappings will be applied: OWNER
     * roles/bigquery.dataOwner WRITER roles/bigquery.dataEditor READER roles/bigquery.dataViewer
     * This field will accept any of the above formats, but will return only the legacy format. For
     * example, if you set this field to "roles/bigquery.dataOwner", it will be returned back as
     * "OWNER".
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String role;
    /**
     * [Pick one] A routine from a different dataset to grant access to. Queries executed against that
     * routine will have read access to views/tables/routines in this dataset. Only UDF is supported
     * for now. The role field is not required when this field is set. If that routine is updated by
     * any user, access to the routine needs to be granted again via an update operation.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private RoutineReference routine;
    /**
     * [Pick one] A special group to grant access to. Possible values include: projectOwners: Owners
     * of the enclosing project. projectReaders: Readers of the enclosing project. projectWriters:
     * Writers of the enclosing project. allAuthenticatedUsers: All authenticated BigQuery users. Maps
     * to similarly-named IAM members.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String specialGroup;
    /**
     * [Pick one] An email address of a user to grant access to. For example: fred@example.com. Maps
     * to IAM policy member "user:EMAIL" or "serviceAccount:EMAIL".
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String userByEmail;
    /**
     * [Pick one] A view from a different dataset to grant access to. Queries executed against that
     * view will have read access to tables in this dataset. The role field is not required when this
     * field is set. If that view is updated by any user, access to the view needs to be granted again
     * via an update operation.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private TableReference view;
    /**
     * [Pick one] A grant authorizing all resources of a particular type in a particular dataset
     * access to this dataset. Only views are supported for now. The role field is not required when
     * this field is set. If that dataset is deleted and re-created, its access needs to be granted
     * again via an update operation.
     * @return value or {@code null} for none
     */
    public DatasetAccessEntry getDataset() {
      return dataset;
    }
    /**
     * [Pick one] A grant authorizing all resources of a particular type in a particular dataset
     * access to this dataset. Only views are supported for now. The role field is not required when
     * this field is set. If that dataset is deleted and re-created, its access needs to be granted
     * again via an update operation.
     * @param dataset dataset or {@code null} for none
     */
    public Access setDataset(DatasetAccessEntry dataset) {
      this.dataset = dataset;
      return this;
    }
    /**
     * [Pick one] A domain to grant access to. Any users signed in with the domain specified will be
     * granted the specified access. Example: "example.com". Maps to IAM policy member
     * "domain:DOMAIN".
     * @return value or {@code null} for none
     */
    public java.lang.String getDomain() {
      return domain;
    }
    /**
     * [Pick one] A domain to grant access to. Any users signed in with the domain specified will be
     * granted the specified access. Example: "example.com". Maps to IAM policy member
     * "domain:DOMAIN".
     * @param domain domain or {@code null} for none
     */
    public Access setDomain(java.lang.String domain) {
      this.domain = domain;
      return this;
    }
    /**
     * [Pick one] An email address of a Google Group to grant access to. Maps to IAM policy member
     * "group:GROUP".
     * @return value or {@code null} for none
     */
    public java.lang.String getGroupByEmail() {
      return groupByEmail;
    }
    /**
     * [Pick one] An email address of a Google Group to grant access to. Maps to IAM policy member
     * "group:GROUP".
     * @param groupByEmail groupByEmail or {@code null} for none
     */
    public Access setGroupByEmail(java.lang.String groupByEmail) {
      this.groupByEmail = groupByEmail;
      return this;
    }
    /**
     * [Pick one] Some other type of member that appears in the IAM Policy but isn't a user, group,
     * domain, or special group.
     * @return value or {@code null} for none
     */
    public java.lang.String getIamMember() {
      return iamMember;
    }
    /**
     * [Pick one] Some other type of member that appears in the IAM Policy but isn't a user, group,
     * domain, or special group.
     * @param iamMember iamMember or {@code null} for none
     */
    public Access setIamMember(java.lang.String iamMember) {
      this.iamMember = iamMember;
      return this;
    }
    /**
     * [Required] An IAM role ID that should be granted to the user, group, or domain specified in
     * this access entry. The following legacy mappings will be applied: OWNER
     * roles/bigquery.dataOwner WRITER roles/bigquery.dataEditor READER roles/bigquery.dataViewer
     * This field will accept any of the above formats, but will return only the legacy format. For
     * example, if you set this field to "roles/bigquery.dataOwner", it will be returned back as
     * "OWNER".
     * @return value or {@code null} for none
     */
    public java.lang.String getRole() {
      return role;
    }
    /**
     * [Required] An IAM role ID that should be granted to the user, group, or domain specified in
     * this access entry. The following legacy mappings will be applied: OWNER
     * roles/bigquery.dataOwner WRITER roles/bigquery.dataEditor READER roles/bigquery.dataViewer
     * This field will accept any of the above formats, but will return only the legacy format. For
     * example, if you set this field to "roles/bigquery.dataOwner", it will be returned back as
     * "OWNER".
     * @param role role or {@code null} for none
     */
    public Access setRole(java.lang.String role) {
      this.role = role;
      return this;
    }
    /**
     * [Pick one] A routine from a different dataset to grant access to. Queries executed against that
     * routine will have read access to views/tables/routines in this dataset. Only UDF is supported
     * for now. The role field is not required when this field is set. If that routine is updated by
     * any user, access to the routine needs to be granted again via an update operation.
     * @return value or {@code null} for none
     */
    public RoutineReference getRoutine() {
      return routine;
    }
    /**
     * [Pick one] A routine from a different dataset to grant access to. Queries executed against that
     * routine will have read access to views/tables/routines in this dataset. Only UDF is supported
     * for now. The role field is not required when this field is set. If that routine is updated by
     * any user, access to the routine needs to be granted again via an update operation.
     * @param routine routine or {@code null} for none
     */
    public Access setRoutine(RoutineReference routine) {
      this.routine = routine;
      return this;
    }
    /**
     * [Pick one] A special group to grant access to. Possible values include: projectOwners: Owners
     * of the enclosing project. projectReaders: Readers of the enclosing project. projectWriters:
     * Writers of the enclosing project. allAuthenticatedUsers: All authenticated BigQuery users. Maps
     * to similarly-named IAM members.
     * @return value or {@code null} for none
     */
    public java.lang.String getSpecialGroup() {
      return specialGroup;
    }
    /**
     * [Pick one] A special group to grant access to. Possible values include: projectOwners: Owners
     * of the enclosing project. projectReaders: Readers of the enclosing project. projectWriters:
     * Writers of the enclosing project. allAuthenticatedUsers: All authenticated BigQuery users. Maps
     * to similarly-named IAM members.
     * @param specialGroup specialGroup or {@code null} for none
     */
    public Access setSpecialGroup(java.lang.String specialGroup) {
      this.specialGroup = specialGroup;
      return this;
    }
    /**
     * [Pick one] An email address of a user to grant access to. For example: fred@example.com. Maps
     * to IAM policy member "user:EMAIL" or "serviceAccount:EMAIL".
     * @return value or {@code null} for none
     */
    public java.lang.String getUserByEmail() {
      return userByEmail;
    }
    /**
     * [Pick one] An email address of a user to grant access to. For example: fred@example.com. Maps
     * to IAM policy member "user:EMAIL" or "serviceAccount:EMAIL".
     * @param userByEmail userByEmail or {@code null} for none
     */
    public Access setUserByEmail(java.lang.String userByEmail) {
      this.userByEmail = userByEmail;
      return this;
    }
    /**
     * [Pick one] A view from a different dataset to grant access to. Queries executed against that
     * view will have read access to tables in this dataset. The role field is not required when this
     * field is set. If that view is updated by any user, access to the view needs to be granted again
     * via an update operation.
     * @return value or {@code null} for none
     */
    public TableReference getView() {
      return view;
    }
    /**
     * [Pick one] A view from a different dataset to grant access to. Queries executed against that
     * view will have read access to tables in this dataset. The role field is not required when this
     * field is set. If that view is updated by any user, access to the view needs to be granted again
     * via an update operation.
     * @param view view or {@code null} for none
     */
    public Access setView(TableReference view) {
      this.view = view;
      return this;
    }
    @Override
    public Access set(String fieldName, Object value) {
      return (Access) super.set(fieldName, value);
    }
    @Override
    public Access clone() {
      return (Access) super.clone();
    }
  }
  /**
   * Model definition for DatasetTags.
   *
   * <p>A single key/value tag attached to a dataset (held in the dataset's {@code tags} list).
   * Tag keys are globally unique.</p>
   */
  public static final class Tags extends com.google.api.client.json.GenericJson {
    /**
     * [Required] The namespaced friendly name of the tag key, e.g. "12345/environment" where 12345 is
     * org id.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String tagKey;
    /**
     * [Required] Friendly short name of the tag value, e.g. "production".
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String tagValue;
    /**
     * [Required] The namespaced friendly name of the tag key, e.g. "12345/environment" where 12345 is
     * org id.
     * @return value or {@code null} for none
     */
    public java.lang.String getTagKey() {
      return tagKey;
    }
    /**
     * [Required] The namespaced friendly name of the tag key, e.g. "12345/environment" where 12345 is
     * org id.
     * @param tagKey tagKey or {@code null} for none
     */
    public Tags setTagKey(java.lang.String tagKey) {
      this.tagKey = tagKey;
      return this;
    }
    /**
     * [Required] Friendly short name of the tag value, e.g. "production".
     * @return value or {@code null} for none
     */
    public java.lang.String getTagValue() {
      return tagValue;
    }
    /**
     * [Required] Friendly short name of the tag value, e.g. "production".
     * @param tagValue tagValue or {@code null} for none
     */
    public Tags setTagValue(java.lang.String tagValue) {
      this.tagValue = tagValue;
      return this;
    }
    @Override
    public Tags set(String fieldName, Object value) {
      return (Tags) super.set(fieldName, value);
    }
    @Override
    public Tags clone() {
      return (Tags) super.clone();
    }
  }
}
|
apache/sqoop | 36,660 | src/java/org/apache/sqoop/manager/SqlManager.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sqoop.manager;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.sql.Types;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.TreeMap;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.sqoop.accumulo.AccumuloUtil;
import org.apache.sqoop.mapreduce.AccumuloImportJob;
import org.apache.sqoop.mapreduce.HBaseBulkImportJob;
import org.apache.sqoop.mapreduce.JdbcCallExportJob;
import org.apache.sqoop.util.LoggingUtils;
import org.apache.sqoop.util.SqlTypeMap;
import org.apache.sqoop.SqoopOptions;
import org.apache.sqoop.hbase.HBaseUtil;
import org.apache.sqoop.mapreduce.DataDrivenImportJob;
import org.apache.sqoop.mapreduce.HBaseImportJob;
import org.apache.sqoop.mapreduce.ImportJobBase;
import org.apache.sqoop.mapreduce.JdbcExportJob;
import org.apache.sqoop.mapreduce.JdbcUpdateExportJob;
import org.apache.sqoop.mapreduce.db.DataDrivenDBInputFormat;
import org.apache.sqoop.util.ExportException;
import org.apache.sqoop.util.ImportException;
import org.apache.sqoop.util.ResultSetPrinter;
/**
* ConnManager implementation for generic SQL-compliant database.
* This is an abstract class; it requires a database-specific
* ConnManager implementation to actually create the connection.
*/
public abstract class SqlManager
extends org.apache.sqoop.manager.ConnManager {
  public static final Log LOG = LogFactory.getLog(SqlManager.class.getName());
  /** Substring that must appear in free-form queries submitted by users.
   * This is the string '$CONDITIONS'.
   */
  public static final String SUBSTITUTE_TOKEN =
      DataDrivenDBInputFormat.SUBSTITUTE_TOKEN;
  // Rows fetched per database round trip when the user does not supply an
  // explicit fetch size (applied in initOptionDefaults()).
  protected static final int DEFAULT_FETCH_SIZE = 1000;
  // Most recently executed statement. Presumably tracked so release() can
  // close it — release()'s definition is outside this chunk; confirm.
  private Statement lastStatement;
  /**
   * Constructs the SqlManager.
   * @param opts the SqoopOptions describing the user's requested action.
   */
  public SqlManager(final SqoopOptions opts) {
    this.options = opts;
    // Fill in database-specific defaults (e.g. fetch size) the user omitted.
    initOptionDefaults();
  }
/**
* Sets default values for values that were not provided by the user.
* Only options with database-specific defaults should be configured here.
*/
protected void initOptionDefaults() {
if (options.getFetchSize() == null) {
LOG.info("Using default fetchSize of " + DEFAULT_FETCH_SIZE);
options.setFetchSize(DEFAULT_FETCH_SIZE);
}
}
/**
* @return the SQL query to use in getColumnNames() in case this logic must
* be tuned per-database, but the main extraction loop is still inheritable.
*/
protected String getColNamesQuery(String tableName) {
// adding where clause to prevent loading a big table
return "SELECT t.* FROM " + escapeTableName(tableName) + " AS t WHERE 1=0";
}
@Override
/** {@inheritDoc} */
public String[] getColumnNames(String tableName) {
String stmt = getColNamesQuery(tableName);
return filterSpecifiedColumnNames(getColumnNamesForRawQuery(stmt));
}
/**
* Utilize the --columns option, if specified.
* @param columns
* @return the subset of columns which were specified by --columns option.
*/
protected String[] filterSpecifiedColumnNames(String[] columns) {
if (options.getColumns() == null) {
return columns;
}
List<String> colNames = new ArrayList<String>();
for (String col : columns) {
String userColName = options.getColumnNameCaseInsensitive(col);
if (userColName != null) {
colNames.add(userColName);
}
}
return colNames.toArray(new String[colNames.size()]);
}
@Override
/** {@inheritDoc} */
public String [] getColumnNamesForQuery(String query) {
String rawQuery = query.replace(SUBSTITUTE_TOKEN, " (1 = 0) ");
return getColumnNamesForRawQuery(rawQuery);
}
/**
* Get column names for a query statement that we do not modify further.
*/
public String[] getColumnNamesForRawQuery(String stmt) {
ResultSet results;
try {
results = execute(stmt);
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "Error executing statement: " + sqlE.toString(),
sqlE);
release();
return null;
}
try {
int cols = results.getMetaData().getColumnCount();
ArrayList<String> columns = new ArrayList<String>();
ResultSetMetaData metadata = results.getMetaData();
for (int i = 1; i < cols + 1; i++) {
String colName = metadata.getColumnLabel(i);
if (colName == null || colName.equals("")) {
colName = metadata.getColumnName(i);
if (null == colName) {
colName = "_RESULT_" + i;
}
}
columns.add(colName);
LOG.debug("Found column " + colName);
}
return columns.toArray(new String[0]);
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading from database: "
+ sqlException.toString(), sqlException);
return null;
} finally {
try {
results.close();
getConnection().commit();
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "SQLException closing ResultSet: "
+ sqlE.toString(), sqlE);
}
release();
}
}
@Override
/**
 * Returns the parameter/column names of a stored procedure, ordered by
 * ORDINAL_POSITION. The return-value column (ordinal 0 or COLUMN_TYPE ==
 * procedureColumnReturn) is excluded. Throws RuntimeException on SQL errors.
 */
public String[] getColumnNamesForProcedure(String procedureName) {
List<String> ret = new ArrayList<String>();
try {
DatabaseMetaData metaData = this.getConnection().getMetaData();
ResultSet results = metaData.getProcedureColumns(null, null,
procedureName, null);
if (null == results) {
return null;
}
try {
while (results.next()) {
if (results.getInt("COLUMN_TYPE")
!= DatabaseMetaData.procedureColumnReturn) {
// ORDINAL_POSITION is 1-based for parameters; 0 marks the return value.
int index = results.getInt("ORDINAL_POSITION") - 1;
if (index < 0) {
continue; // actually the return type
}
// Rows may arrive out of order: pad the list with nulls up to the
// target slot so set()/add() below always lands at 'index'.
for(int i = ret.size(); i < index; ++i) {
ret.add(null);
}
String name = results.getString("COLUMN_NAME");
if (index == ret.size()) {
ret.add(name);
} else {
ret.set(index, name);
}
}
}
LOG.debug("getColumnsNamesForProcedure returns "
+ StringUtils.join(ret, ","));
return ret.toArray(new String[ret.size()]);
} finally {
results.close();
getConnection().commit();
}
} catch (SQLException e) {
LoggingUtils.logAll(LOG, "Error reading procedure metadata: ", e);
throw new RuntimeException("Can't fetch column names for procedure.", e);
}
}
/**
 * @return the SQL query to use in getColumnTypes() in case this logic must
 * be tuned per-database, but the main extraction loop is still inheritable.
 */
protected String getColTypesQuery(String tableName) {
  return getColNamesQuery(tableName);
}

/** Returns a map from column name to its java.sql.Types id for the table. */
@Override
public Map<String, Integer> getColumnTypes(String tableName) {
  String stmt = getColTypesQuery(tableName);
  return getColumnTypesForRawQuery(stmt);
}

/** Returns a map from column name to java.sql.Types id for a free-form query. */
@Override
public Map<String, Integer> getColumnTypesForQuery(String query) {
  // Manipulate the query to return immediately, with zero rows.
  String rawQuery = query.replace(SUBSTITUTE_TOKEN, " (1 = 0) ");
  return getColumnTypesForRawQuery(rawQuery);
}

/**
 * Get column types for a query statement that we do not modify further.
 *
 * @return map of column name to type id, or null if the underlying
 *     metadata query failed (already logged by the callee).
 */
protected Map<String, Integer> getColumnTypesForRawQuery(String stmt) {
  Map<String, List<Integer>> colInfo = getColumnInfoForRawQuery(stmt);
  if (colInfo == null) {
    return null;
  }
  Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
  // Iterate entries directly rather than keySet() + get(): one lookup per
  // column instead of two. The first info element is the type id.
  for (Map.Entry<String, List<Integer>> entry : colInfo.entrySet()) {
    colTypes.put(entry.getKey(), entry.getValue().get(0));
  }
  return colTypes;
}

/** Returns per-column [type, precision, scale] info for the table. */
@Override
public Map<String, List<Integer>> getColumnInfo(String tableName) {
  String stmt = getColNamesQuery(tableName);
  return getColumnInfoForRawQuery(stmt);
}

/** Returns per-column [type, precision, scale] info for a free-form query. */
@Override
public Map<String, List<Integer>> getColumnInfoForQuery(String query) {
  // Manipulate the query to return immediately, with zero rows.
  String rawQuery = query.replace(SUBSTITUTE_TOKEN, " (1 = 0) ");
  return getColumnInfoForRawQuery(rawQuery);
}
/**
 * Executes the given statement and returns, per column, a 3-element list
 * [java.sql.Types id, precision, scale] keyed by column label (falling back
 * to column name). Returns null on SQL errors, which are logged.
 */
protected Map<String, List<Integer>> getColumnInfoForRawQuery(String stmt) {
ResultSet results;
LOG.debug("Execute getColumnInfoRawQuery : " + stmt);
try {
results = execute(stmt);
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "Error executing statement: " + sqlE.toString(),
sqlE);
release();
return null;
}
try {
Map<String, List<Integer>> colInfo =
new SqlTypeMap<String, List<Integer>>();
int cols = results.getMetaData().getColumnCount();
ResultSetMetaData metadata = results.getMetaData();
for (int i = 1; i < cols + 1; i++) {
int typeId = metadata.getColumnType(i);
int precision = metadata.getPrecision(i);
int scale = metadata.getScale(i);
// If we have an unsigned int we need to make extra room by
// plopping it into a bigint
if (typeId == Types.INTEGER && !metadata.isSigned(i)){
typeId = Types.BIGINT;
}
String colName = metadata.getColumnLabel(i);
if (colName == null || colName.equals("")) {
// No alias/label set; use the physical column name.
colName = metadata.getColumnName(i);
}
List<Integer> info = new ArrayList<Integer>(3);
info.add(Integer.valueOf(typeId));
info.add(precision);
info.add(scale);
colInfo.put(colName, info);
LOG.debug("Found column " + colName + " of type " + info);
}
return colInfo;
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading from database: "
+ sqlException.toString(), sqlException);
return null;
} finally {
try {
results.close();
// Commit so the metadata query releases any locks (autoCommit is off).
getConnection().commit();
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG,
"SQLException closing ResultSet: " + sqlE.toString(), sqlE);
}
release();
}
}
/** Returns a map from column name to database-specific type name for a table. */
@Override
public Map<String, String> getColumnTypeNamesForTable(String tableName) {
  return getColumnTypeNamesForRawQuery(getColTypesQuery(tableName));
}

/** Returns a map from column name to database-specific type name for a query. */
@Override
public Map<String, String> getColumnTypeNamesForQuery(String query) {
  // Replace the placeholder with an always-false predicate: zero rows back.
  return getColumnTypeNamesForRawQuery(
      query.replace(SUBSTITUTE_TOKEN, " (1 = 0) "));
}
/**
 * Executes the given statement and maps each column label (or name) to the
 * database-specific type name reported by the driver. Returns null on SQL
 * errors, which are logged rather than thrown.
 */
protected Map<String, String> getColumnTypeNamesForRawQuery(String stmt) {
ResultSet results;
try {
results = execute(stmt);
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "Error executing statement: " + sqlE.toString(),
sqlE);
release();
return null;
}
try {
Map<String, String> colTypeNames = new HashMap<String, String>();
int cols = results.getMetaData().getColumnCount();
ResultSetMetaData metadata = results.getMetaData();
for (int i = 1; i < cols + 1; i++) {
String colTypeName = metadata.getColumnTypeName(i);
String colName = metadata.getColumnLabel(i);
if (colName == null || colName.equals("")) {
// No alias/label set; fall back to the physical column name.
colName = metadata.getColumnName(i);
}
colTypeNames.put(colName, colTypeName);
LOG.debug("Found column " + colName + " of type " + colTypeName);
}
return colTypeNames;
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading from database: "
+ sqlException.toString(), sqlException);
return null;
} finally {
try {
results.close();
// Commit so the metadata query releases any locks (autoCommit is off).
getConnection().commit();
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "SQLException closing ResultSet: "
+ sqlE.toString(), sqlE);
}
release();
}
}
/**
 * Builds and executes a SELECT over the requested columns of the table.
 * When {@code columns} is null, all columns of the table are selected.
 */
@Override
public ResultSet readTable(String tableName, String[] columns)
    throws SQLException {
  if (columns == null) {
    columns = getColumnNames(tableName);
  }
  StringBuilder sql = new StringBuilder("SELECT ");
  String separator = "";
  for (String col : columns) {
    sql.append(separator).append(escapeColName(col));
    separator = ", ";
  }
  sql.append(" FROM ").append(escapeTableName(tableName));
  // Self-alias: needed for hsqldb; doesn't hurt anyone else.
  sql.append(" AS ").append(escapeTableName(tableName));
  String sqlCmd = sql.toString();
  LOG.debug("Reading table with command: " + sqlCmd);
  return execute(sqlCmd);
}

/** Not supported by the generic manager; subclasses may override. */
@Override
public String[] listDatabases() {
  // TODO(aaron): Implement this!
  LOG.error("Generic SqlManager.listDatabases() not implemented.");
  return null;
}
/**
 * Returns a map from parameter name to java.sql.Types id for a stored
 * procedure, or null when no parameter metadata is available.
 */
@Override
public Map<String, Integer> getColumnTypesForProcedure(String procedureName) {
  Map<String, List<Integer>> colInfo =
      getColumnInfoForProcedure(procedureName);
  if (colInfo == null) {
    return null;
  }
  Map<String, Integer> colTypes = new SqlTypeMap<String, Integer>();
  // Iterate entries directly rather than keySet() + get(): one lookup per
  // column instead of two. The first info element is the type id.
  for (Map.Entry<String, List<Integer>> entry : colInfo.entrySet()) {
    colTypes.put(entry.getKey(), entry.getValue().get(0));
  }
  return colTypes;
}
@Override
/**
 * Returns per-parameter [type, precision, scale] info for a stored
 * procedure, keyed by parameter name; the return-value column is skipped.
 * Returns null when metadata is unavailable or no parameters were found.
 */
public Map<String, List<Integer>>
getColumnInfoForProcedure(String procedureName) {
Map<String, List<Integer>> ret = new TreeMap<String, List<Integer>>();
try {
DatabaseMetaData metaData = this.getConnection().getMetaData();
ResultSet results = metaData.getProcedureColumns(null, null,
procedureName, null);
if (null == results) {
return null;
}
try {
while (results.next()) {
if (results.getInt("COLUMN_TYPE")
!= DatabaseMetaData.procedureColumnReturn
&& results.getInt("ORDINAL_POSITION") > 0) {
// we don't care if we get several rows for the
// same ORDINAL_POSITION (e.g. like H2 gives us)
// as we'll just overwrite the entry in the map:
List<Integer> info = new ArrayList<Integer>(3);
info.add(results.getInt("DATA_TYPE"));
info.add(results.getInt("PRECISION"));
info.add(results.getInt("SCALE"));
ret.put(results.getString("COLUMN_NAME"), info);
}
}
LOG.debug("Columns returned = " + StringUtils.join(ret.keySet(), ","));
LOG.debug("Types returned = " + StringUtils.join(ret.values(), ","));
// Empty map means the procedure was not found (or has no parameters);
// callers expect null in that case.
return ret.isEmpty() ? null : ret;
} finally {
results.close();
getConnection().commit();
}
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading primary key metadata: "
+ sqlException.toString(), sqlException);
return null;
}
}
@Override
/**
 * Returns a map from stored-procedure parameter name to the driver-reported
 * type name; the return-value column is skipped. Returns null when metadata
 * is unavailable or no parameters were found.
 */
public Map<String, String>
getColumnTypeNamesForProcedure(String procedureName) {
Map<String, String> ret = new TreeMap<String, String>();
try {
DatabaseMetaData metaData = this.getConnection().getMetaData();
ResultSet results = metaData.getProcedureColumns(null, null,
procedureName, null);
if (null == results) {
return null;
}
try {
while (results.next()) {
if (results.getInt("COLUMN_TYPE")
!= DatabaseMetaData.procedureColumnReturn
&& results.getInt("ORDINAL_POSITION") > 0) {
// we don't care if we get several rows for the
// same ORDINAL_POSITION (e.g. like H2 gives us)
// as we'll just overwrite the entry in the map:
ret.put(
results.getString("COLUMN_NAME"),
results.getString("TYPE_NAME"));
}
}
LOG.debug("Columns returned = " + StringUtils.join(ret.keySet(), ","));
LOG.debug(
"Type names returned = " + StringUtils.join(ret.values(), ","));
// Callers expect null (not an empty map) when nothing was found.
return ret.isEmpty() ? null : ret;
} finally {
results.close();
getConnection().commit();
}
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading primary key metadata: "
+ sqlException.toString(), sqlException);
return null;
}
}
@Override
/**
 * Lists the names of all objects of type TABLE visible through JDBC
 * metadata. Returns null on any SQL error (logged, not thrown).
 */
public String[] listTables() {
ResultSet results = null;
// Restrict to plain tables; excludes views, system tables, etc.
String [] tableTypes = {"TABLE"};
try {
try {
DatabaseMetaData metaData = this.getConnection().getMetaData();
results = metaData.getTables(null, null, null, tableTypes);
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading database metadata: "
+ sqlException.toString(), sqlException);
return null;
}
if (null == results) {
return null;
}
try {
ArrayList<String> tables = new ArrayList<String>();
while (results.next()) {
String tableName = results.getString("TABLE_NAME");
tables.add(tableName);
}
return tables.toArray(new String[0]);
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading from database: "
+ sqlException.toString(), sqlException);
return null;
}
} finally {
if (null != results) {
try {
results.close();
// Commit so the metadata query releases any locks (autoCommit is off).
getConnection().commit();
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "Exception closing ResultSet: "
+ sqlE.toString(), sqlE);
}
}
}
}
@Override
/**
 * Returns the name of the first primary-key column of the table reported by
 * JDBC metadata, or null if the table has no primary key or the metadata
 * query fails (errors are logged). Note: for composite keys only the first
 * column returned by the driver is used.
 */
public String getPrimaryKey(String tableName) {
try {
DatabaseMetaData metaData = this.getConnection().getMetaData();
ResultSet results = metaData.getPrimaryKeys(null, null, tableName);
if (null == results) {
return null;
}
try {
if (results.next()) {
return results.getString("COLUMN_NAME");
} else {
return null;
}
} finally {
results.close();
getConnection().commit();
}
} catch (SQLException sqlException) {
LoggingUtils.logAll(LOG, "Error reading primary key metadata: "
+ sqlException.toString(), sqlException);
return null;
}
}
/**
 * Retrieve the actual connection from the outer ConnManager.
 * @return the live JDBC connection managed by the concrete subclass.
 * @throws SQLException if the connection cannot be obtained.
 */
public abstract Connection getConnection() throws SQLException;

/**
 * Determine what column to use to split the table.
 * @param opts the SqoopOptions controlling this import.
 * @param tableName the table to import.
 * @return the splitting column, if one is set or inferrable, or null
 * otherwise.
 */
protected String getSplitColumn(SqoopOptions opts, String tableName) {
  String splitCol = opts.getSplitByCol();
  if (splitCol != null || tableName == null) {
    return splitCol;
  }
  // No explicit --split-by; fall back to the table's primary key.
  return getPrimaryKey(tableName);
}
/**
 * Offers the ConnManager an opportunity to validate that the
 * options specified in the ImportJobContext are valid.
 * @throws ImportException if the import is misconfigured.
 */
protected void checkTableImportOptions(
    org.apache.sqoop.manager.ImportJobContext context)
    throws IOException, ImportException {
  SqoopOptions opts = context.getOptions();
  String tableName = context.getTableName();
  // Default implementation: a parallel import needs a usable split column.
  String splitCol = getSplitColumn(opts, tableName);
  if (splitCol != null || opts.getNumMappers() <= 1) {
    return; // Either we can split, or splitting is not needed.
  }
  if (opts.getAutoResetToOneMapper()) {
    // User asked us to degrade gracefully to a sequential import.
    LOG.warn("Split by column not provided or can't be inferred. Resetting to one mapper");
    opts.setNumMappers(1);
  } else {
    // Can't infer a primary key.
    throw new ImportException("No primary key could be found for table "
        + tableName + ". Please specify one with --split-by or perform "
        + "a sequential import with '-m 1'.");
  }
}
/**
 * Default implementation of importTable() is to launch a MapReduce job
 * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat.
 * The destination (HBase, Accumulo, or HDFS) is chosen from the options.
 */
public void importTable(org.apache.sqoop.manager.ImportJobContext context)
throws IOException, ImportException {
String tableName = context.getTableName();
String jarFile = context.getJarFile();
SqoopOptions opts = context.getOptions();
context.setConnManager(this);
ImportJobBase importer;
if (opts.getHBaseTable() != null) {
// Import to HBase.
if (!HBaseUtil.isHBaseJarPresent()) {
throw new ImportException("HBase jars are not present in "
+ "classpath, cannot import to HBase!");
}
if (!opts.isBulkLoadEnabled()){
importer = new HBaseImportJob(opts, context);
} else {
importer = new HBaseBulkImportJob(opts, context);
}
} else if (opts.getAccumuloTable() != null) {
// Import to Accumulo.
if (!AccumuloUtil.isAccumuloJarPresent()) {
throw new ImportException("Accumulo jars are not present in "
+ "classpath, cannot import to Accumulo!");
}
importer = new AccumuloImportJob(opts, context);
} else {
// Import to HDFS.
importer = new DataDrivenImportJob(opts, context.getInputFormat(),
context, getParquetJobConfigurator().createParquetImportJobConfigurator());
}
// Validate split settings before launching (may reset mappers to 1).
checkTableImportOptions(context);
String splitCol = getSplitColumn(opts, tableName);
importer.runImport(tableName, jarFile, splitCol, opts.getConf());
}
/**
 * Default implementation of importQuery() is to launch a MapReduce job
 * via DataDrivenImportJob to read the table with DataDrivenDBInputFormat,
 * using its free-form query importer. Unlike importTable(), no split
 * column can be inferred here, so parallel imports require --split-by.
 */
public void importQuery(org.apache.sqoop.manager.ImportJobContext context)
throws IOException, ImportException {
String jarFile = context.getJarFile();
SqoopOptions opts = context.getOptions();
context.setConnManager(this);
ImportJobBase importer;
if (opts.getHBaseTable() != null) {
// Import to HBase.
if (!HBaseUtil.isHBaseJarPresent()) {
throw new ImportException("HBase jars are not present in classpath,"
+ " cannot import to HBase!");
}
if (!opts.isBulkLoadEnabled()){
importer = new HBaseImportJob(opts, context);
} else {
importer = new HBaseBulkImportJob(opts, context);
}
} else if (opts.getAccumuloTable() != null) {
// Import to Accumulo.
if (!AccumuloUtil.isAccumuloJarPresent()) {
throw new ImportException("Accumulo jars are not present in classpath,"
+ " cannot import to Accumulo!");
}
importer = new AccumuloImportJob(opts, context);
} else {
// Import to HDFS.
importer = new DataDrivenImportJob(opts, context.getInputFormat(),
context, getParquetJobConfigurator().createParquetImportJobConfigurator());
}
// Table name is null for a free-form query, so only --split-by applies.
String splitCol = getSplitColumn(opts, null);
if (splitCol == null) {
String boundaryQuery = opts.getBoundaryQuery();
if (opts.getNumMappers() > 1) {
// Can't infer a primary key.
throw new ImportException("A split-by column must be specified for "
+ "parallel free-form query imports. Please specify one with "
+ "--split-by or perform a sequential import with '-m 1'.");
} else if (boundaryQuery != null && !boundaryQuery.isEmpty()) {
// Query import with boundary query and no split column specified
throw new ImportException("Using a boundary query for a query based "
+ "import requires specifying the split by column as well. Please "
+ "specify a column name using --split-by and try again.");
}
}
importer.runImport(null, jarFile, splitCol, opts.getConf());
}
/**
 * Executes an arbitrary SQL statement.
 * @param stmt The SQL statement to execute
 * @param fetchSize Overrides default or parameterized fetch size
 * @param args positional values bound to the statement's '?' placeholders.
 * @return A ResultSet encapsulating the results or null on error
 */
protected ResultSet execute(String stmt, Integer fetchSize, Object... args)
    throws SQLException {
  // Release any previously-open statement.
  release();
  // Assign directly; the original dead 'statement = null' initializer
  // served no purpose.
  PreparedStatement statement = this.getConnection().prepareStatement(stmt,
      ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
  if (fetchSize != null) {
    LOG.debug("Using fetchSize for next query: " + fetchSize);
    statement.setFetchSize(fetchSize);
  }
  // Track the statement so release() can close it later; the ResultSet we
  // hand back stays valid until then.
  this.lastStatement = statement;
  if (null != args) {
    for (int i = 0; i < args.length; i++) {
      // JDBC parameter indices are 1-based.
      statement.setObject(i + 1, args[i]);
    }
  }
  LOG.info("Executing SQL statement: " + stmt);
  return statement.executeQuery();
}

/**
 * Executes an arbitrary SQL Statement.
 * @param stmt The SQL statement to execute
 * @return A ResultSet encapsulating the results or null on error
 */
protected ResultSet execute(String stmt, Object... args) throws SQLException {
  // Delegate with the fetch size configured in the Sqoop options.
  return execute(stmt, options.getFetchSize(), args);
}
/** Closes any outstanding statement held by this manager. */
public void close() throws SQLException {
  release();
}
/**
 * Prints the contents of a ResultSet to the specified PrintWriter.
 * The ResultSet is closed at the end of this method.
 * @param results the ResultSet to print.
 * @param pw the location to print the data to.
 */
protected void formatAndPrintResultSet(ResultSet results, PrintWriter pw) {
try {
try {
int cols = results.getMetaData().getColumnCount();
pw.println("Got " + cols + " columns back");
if (cols > 0) {
ResultSetMetaData rsmd = results.getMetaData();
// Schema/table of the first column only; informational header.
String schema = rsmd.getSchemaName(1);
String table = rsmd.getTableName(1);
if (null != schema) {
pw.println("Schema: " + schema);
}
if (null != table) {
pw.println("Table: " + table);
}
}
} catch (SQLException sqlE) {
// Metadata is optional for display; keep going and print the rows.
LoggingUtils.logAll(LOG, "SQLException reading result metadata: "
+ sqlE.toString(), sqlE);
}
try {
new ResultSetPrinter().printResultSet(pw, results);
} catch (IOException ioe) {
LOG.error("IOException writing results: " + ioe.toString());
return;
}
} finally {
try {
results.close();
// Commit so the read releases any locks (autoCommit is off).
getConnection().commit();
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "SQLException closing ResultSet: "
+ sqlE.toString(), sqlE);
}
release();
}
}
/**
 * Poor man's SQL query interface; used for debugging.
 * @param s the SQL statement to execute.
 */
public void execAndPrint(String s) {
  ResultSet results = null;
  try {
    results = execute(s);
  } catch (SQLException sqlE) {
    LoggingUtils.logAll(LOG, "Error executing statement: ", sqlE);
    release();
    return;
  }
  PrintWriter pw = new PrintWriter(System.out, true);
  try {
    formatAndPrintResultSet(results, pw);
  } finally {
    // Flush rather than close: closing a PrintWriter wrapping System.out
    // closes System.out itself, silencing all later console output.
    pw.flush();
  }
}
/**
 * Create a connection to the database; usually used only from within
 * getConnection(), which enforces a singleton guarantee around the
 * Connection object.
 *
 * @return a new connection with auto-commit disabled and the metadata
 *     transaction isolation level applied.
 * @throws SQLException if the driver cannot establish the connection.
 */
protected Connection makeConnection() throws SQLException {
  Connection connection;
  String driverClass = getDriverClass();
  try {
    Class.forName(driverClass);
  } catch (ClassNotFoundException cnfe) {
    // Preserve the original exception as the cause so the stack trace
    // shows why the driver failed to load.
    throw new RuntimeException("Could not load db driver class: "
        + driverClass, cnfe);
  }
  String username = options.getUsername();
  String password = options.getPassword();
  String connectString = options.getConnectString();
  Properties connectionParams = options.getConnectionParams();
  if (connectionParams != null && connectionParams.size() > 0) {
    LOG.debug("User specified connection params. "
        + "Using properties specific API for making connection.");
    Properties props = new Properties();
    if (username != null) {
      props.put("user", username);
    }
    if (password != null) {
      props.put("password", password);
    }
    // User-supplied params may override user/password set above.
    props.putAll(connectionParams);
    connection = DriverManager.getConnection(connectString, props);
  } else {
    LOG.debug("No connection parameters specified. "
        + "Using regular API for making connection.");
    if (username == null) {
      connection = DriverManager.getConnection(connectString);
    } else {
      connection = DriverManager.getConnection(
          connectString, username, password);
    }
  }
  // We only use this for metadata queries. Loosest semantics are okay.
  connection.setTransactionIsolation(getMetadataIsolationLevel());
  connection.setAutoCommit(false);
  return connection;
}
/**
 * @return the transaction isolation level to use for metadata queries
 * (queries executed by the ConnManager itself), as configured in the
 * Sqoop options.
 */
protected int getMetadataIsolationLevel() {
  return options.getMetadataTransactionIsolationLevel();
}
/**
 * Export data stored in HDFS into a table in a database, via a JDBC
 * INSERT-based MapReduce job.
 */
public void exportTable(org.apache.sqoop.manager.ExportJobContext context)
    throws IOException, ExportException {
  context.setConnManager(this);
  new JdbcExportJob(context,
      getParquetJobConfigurator().createParquetExportJobConfigurator())
      .runExport();
}

/** Exports data by invoking a stored procedure per record. */
@Override
public void callTable(org.apache.sqoop.manager.ExportJobContext context)
    throws IOException,
    ExportException {
  context.setConnManager(this);
  new JdbcCallExportJob(context,
      getParquetJobConfigurator().createParquetExportJobConfigurator())
      .runExport();
}
/** Closes and forgets the most recently executed statement, if any. */
public void release() {
  if (this.lastStatement == null) {
    return;
  }
  try {
    this.lastStatement.close();
  } catch (SQLException e) {
    // Best-effort cleanup; log and move on.
    LoggingUtils.logAll(LOG, "Exception closing executed Statement: "
        + e, e);
  }
  this.lastStatement = null;
}
@Override
/**
 * {@inheritDoc}
 */
public void updateTable(
    org.apache.sqoop.manager.ExportJobContext context)
    throws IOException, ExportException {
  context.setConnManager(this);
  new JdbcUpdateExportJob(context,
      getParquetJobConfigurator().createParquetExportJobConfigurator())
      .runExport();
}

/**
 * @return a SQL query to retrieve the current timestamp from the db.
 * Subclasses override this where the database uses different syntax.
 */
protected String getCurTimestampQuery() {
  return "SELECT CURRENT_TIMESTAMP()";
}
@Override
/**
 * {@inheritDoc}
 * Runs getCurTimestampQuery() against the database and returns the first
 * value, or null on any failure (logged, not thrown).
 */
public Timestamp getCurrentDbTimestamp() {
release(); // Release any previous ResultSet.
Statement s = null;
ResultSet rs = null;
try {
Connection c = getConnection();
s = c.createStatement();
rs = s.executeQuery(getCurTimestampQuery());
if (rs == null || !rs.next()) {
return null; // empty ResultSet.
}
return rs.getTimestamp(1);
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "SQL exception accessing current timestamp: "
+ sqlE, sqlE);
return null;
} finally {
// Close the ResultSet and Statement independently so a failure in one
// does not prevent closing the other.
try {
if (null != rs) {
rs.close();
}
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "SQL Exception closing resultset: "
+ sqlE, sqlE);
}
try {
if (null != s) {
s.close();
}
} catch (SQLException sqlE) {
LoggingUtils.logAll(LOG, "SQL Exception closing statement: "
+ sqlE, sqlE);
}
}
}
@Override
/**
 * Returns COUNT(*) for the given table. Unlike most methods here, SQL
 * errors are rethrown to the caller after being logged.
 */
public long getTableRowCount(String tableName) throws SQLException {
release(); // Release any previous ResultSet
// Escape used table name
tableName = escapeTableName(tableName);
long result = -1;
String countQuery = "SELECT COUNT(*) FROM " + tableName;
Statement stmt = null;
ResultSet rset = null;
try {
Connection conn = getConnection();
stmt = conn.createStatement();
rset = stmt.executeQuery(countQuery);
rset.next();
result = rset.getLong(1);
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to query count * for table "
+ tableName, ex);
throw ex;
} finally {
// Close ResultSet and Statement independently; either may be null if
// the failure happened early.
if (rset != null) {
try {
rset.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to close result set", ex);
}
}
if (stmt != null) {
try {
stmt.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to close statement", ex);
}
}
}
return result;
}
@Override
/**
 * Deletes every row of the given table and commits. SQL errors are logged
 * and rethrown.
 */
public void deleteAllRecords(String tableName) throws SQLException {
release(); // Release any previous ResultSet
// Escape table name
tableName = escapeTableName(tableName);
String deleteQuery = "DELETE FROM " + tableName;
Statement stmt = null;
try {
Connection conn = getConnection();
stmt = conn.createStatement();
int updateCount = stmt.executeUpdate(deleteQuery);
// Explicit commit required: connections run with autoCommit disabled.
conn.commit();
LOG.info("Deleted " + updateCount + " records from " + tableName);
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to execute delete query: "
+ deleteQuery, ex);
throw ex;
} finally {
if (stmt != null) {
try {
stmt.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to close statement", ex);
}
}
}
}
@Override
/**
 * Moves all rows from fromTable into toTable within one transaction:
 * INSERT ... SELECT followed by DELETE. If the inserted and deleted row
 * counts disagree, the transaction is rolled back and a RuntimeException
 * is thrown. SQL errors are logged and rethrown.
 */
public void migrateData(String fromTable, String toTable)
throws SQLException {
release(); // Release any previous ResultSet
// Escape all table names
fromTable = escapeTableName(fromTable);
toTable = escapeTableName(toTable);
String updateQuery = "INSERT INTO " + toTable
+ " ( SELECT * FROM " + fromTable + " )";
String deleteQuery = "DELETE FROM " + fromTable;
Statement stmt = null;
try {
Connection conn = getConnection();
stmt = conn.createStatement();
// Insert data from the fromTable to the toTable
int updateCount = stmt.executeUpdate(updateQuery);
LOG.info("Migrated " + updateCount + " records from " + fromTable
+ " to " + toTable);
// Delete the records from the fromTable
int deleteCount = stmt.executeUpdate(deleteQuery);
// If the counts do not match, fail the transaction
if (updateCount != deleteCount) {
conn.rollback();
throw new RuntimeException("Inconsistent record counts");
}
// Both statements succeeded with matching counts; make them durable.
conn.commit();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to migrate data from "
+ fromTable + " to " + toTable, ex);
throw ex;
} finally {
if (stmt != null) {
try {
stmt.close();
} catch (SQLException ex) {
LoggingUtils.logAll(LOG, "Unable to close statement", ex);
}
}
}
}
/**
 * Returns the user-supplied boundary query for split computation; the
 * split column and sanitized query are ignored by this base implementation.
 */
public String getInputBoundsQuery(String splitByCol, String sanitizedQuery) {
  return options.getBoundaryQuery();
}
}
|
google/guava | 36,456 | android/guava-tests/test/com/google/common/net/InetAddressesTest.java | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.net;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import com.google.common.collect.ImmutableSet;
import com.google.common.testing.NullPointerTester;
import java.math.BigInteger;
import java.net.Inet4Address;
import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Enumeration;
import junit.framework.TestCase;
import org.jspecify.annotations.NullUnmarked;
/**
* Tests for {@link InetAddresses}.
*
* @author Erik Kline
*/
@NullUnmarked
public class InetAddressesTest extends TestCase {
/** All public static methods must reject null arguments. */
public void testNulls() {
  new NullPointerTester().testAllPublicStaticMethods(InetAddresses.class);
}
/**
 * Every malformed literal must make forString() throw and isInetAddress()
 * return false. Covers octal/hex quads, wrong part counts, bad '::' usage,
 * signs, and trailing/leading separators.
 */
public void testForStringBogusInput() {
ImmutableSet<String> bogusInputs =
ImmutableSet.of(
"",
"016.016.016.016",
"016.016.016",
"016.016",
"016",
"000.000.000.000",
"000",
"0x0a.0x0a.0x0a.0x0a",
"0x0a.0x0a.0x0a",
"0x0a.0x0a",
"0x0a",
"42.42.42.42.42",
"42.42.42",
"42.42",
"42",
"42..42.42",
"42..42.42.42",
"42.42.42.42.",
"42.42.42.42...",
".42.42.42.42",
".42.42.42",
"...42.42.42.42",
"42.42.42.-0",
"42.42.42.+0",
".",
"...",
"bogus",
"bogus.com",
"192.168.0.1.com",
"12345.67899.-54321.-98765",
"257.0.0.0",
"42.42.42.-42",
"42.42.42.ab",
"3ffe::1.net",
"3ffe::1::1",
"1::2::3::4:5",
"::7:6:5:4:3:2:", // should end with ":0"
":6:5:4:3:2:1::", // should begin with "0:"
"2001::db:::1",
"FEDC:9878",
"+1.+2.+3.4",
"1.2.3.4e0",
"6:5:4:3:2:1:0", // too few parts
"::7:6:5:4:3:2:1:0", // too many parts
"7:6:5:4:3:2:1:0::", // too many parts
"9:8:7:6:5:4:3::2:1", // too many parts
"0:1:2:3::4:5:6:7", // :: must remove at least one 0.
"3ffe:0:0:0:0:0:0:0:1", // too many parts (9 instead of 8)
"3ffe::10000", // hextet exceeds 16 bits
"3ffe::goog",
"3ffe::-0",
"3ffe::+0",
"3ffe::-1",
":",
":::",
"::1.2.3",
"::1.2.3.4.5",
"::1.2.3.4:",
"1.2.3.4::",
"2001:db8::1:",
":2001:db8::1",
":1:2:3:4:5:6:7",
"1:2:3:4:5:6:7:",
":1:2:3:4:5:6:");
for (String bogusInput : bogusInputs) {
// Both entry points must agree: throw from forString, false from
// isInetAddress.
assertThrows(
"IllegalArgumentException expected for '" + bogusInput + "'",
IllegalArgumentException.class,
() -> InetAddresses.forString(bogusInput));
assertFalse(InetAddresses.isInetAddress(bogusInput));
}
}

/** Regression test for specific rejected literals (triple colon, octal quad). */
public void test3ff31() {
assertThrows(IllegalArgumentException.class, () -> InetAddresses.forString("3ffe:::1"));
assertFalse(InetAddresses.isInetAddress("016.016.016.016"));
}
/** A dotted-quad literal parses and matches the JDK's own parse. */
public void testForStringIPv4Input() throws UnknownHostException {
  String ipStr = "192.168.0.1";
  // Shouldn't hit DNS, because it's an IP string literal.
  InetAddress expected = InetAddress.getByName(ipStr);
  assertEquals(expected, InetAddresses.forString(ipStr));
  assertTrue(InetAddresses.isInetAddress(ipStr));
}

/** Non-ASCII digits: behavior must match whatever the JDK accepts. */
public void testForStringIPv4NonAsciiInput() throws UnknownHostException {
  String ipStr = "૧૯૨.૧૬૮.૦.૧"; // 192.168.0.1 in Gujarati digits
  // Shouldn't hit DNS, because it's an IP string literal.
  InetAddress expected;
  try {
    expected = InetAddress.getByName(ipStr);
  } catch (UnknownHostException e) {
    // OK: this is probably Android, which is stricter.
    return;
  }
  assertEquals(expected, InetAddresses.forString(ipStr));
  assertTrue(InetAddresses.isInetAddress(ipStr));
}

/** A compressed IPv6 literal parses and matches the JDK's own parse. */
public void testForStringIPv6Input() throws UnknownHostException {
  String ipStr = "3ffe::1";
  // Shouldn't hit DNS, because it's an IP string literal.
  InetAddress expected = InetAddress.getByName(ipStr);
  assertEquals(expected, InetAddresses.forString(ipStr));
  assertTrue(InetAddresses.isInetAddress(ipStr));
}

/** Non-ASCII digits in IPv6: behavior must match whatever the JDK accepts. */
public void testForStringIPv6NonAsciiInput() throws UnknownHostException {
  String ipStr = "૩ffe::૧"; // 3ffe::1 with Gujarati digits for 3 and 1
  // Shouldn't hit DNS, because it's an IP string literal.
  InetAddress expected;
  try {
    expected = InetAddress.getByName(ipStr);
  } catch (UnknownHostException e) {
    // OK: this is probably Android, which is stricter.
    return;
  }
  assertEquals(expected, InetAddresses.forString(ipStr));
  assertTrue(InetAddresses.isInetAddress(ipStr));
}

/** IPv6 literals with all eight groups spelled out via a leading/trailing '::'. */
public void testForStringIPv6EightColons() throws UnknownHostException {
  ImmutableSet<String> eightColons =
      ImmutableSet.of("::7:6:5:4:3:2:1", "::7:6:5:4:3:2:0", "7:6:5:4:3:2:1::", "0:6:5:4:3:2:1::");
  for (String candidate : eightColons) {
    // Shouldn't hit DNS, because it's an IP string literal.
    InetAddress expected = InetAddress.getByName(candidate);
    assertEquals(expected, InetAddresses.forString(candidate));
    assertTrue(InetAddresses.isInetAddress(candidate));
  }
}

/** IPv6 literals with an embedded dotted-quad tail parse like the JDK. */
public void testConvertDottedQuadToHex() throws UnknownHostException {
  ImmutableSet<String> mixedNotation =
      ImmutableSet.of("7::0.128.0.127", "7::0.128.0.128", "7::128.128.0.127", "7::0.128.128.127");
  for (String candidate : mixedNotation) {
    // Shouldn't hit DNS, because it's an IP string literal.
    InetAddress expected = InetAddress.getByName(candidate);
    assertEquals(expected, InetAddresses.forString(candidate));
    assertTrue(InetAddresses.isInetAddress(candidate));
  }
}
/** A scope/zone suffix is never valid on a plain IPv4 literal. */
public void testIPv4AddressWithScopeId() throws SocketException {
  for (String ipString : ImmutableSet.of("1.2.3.4", "192.168.0.1")) {
    for (String scope : getMachineScopesAndInterfaces()) {
      String candidate = ipString + "%" + scope;
      assertFalse(
          "InetAddresses.isInetAddress(" + candidate + ") should be false but was true",
          InetAddresses.isInetAddress(candidate));
    }
  }
}

/** A scope/zone suffix is never valid on an IPv6 literal with a dotted-quad tail. */
public void testDottedQuadAddressWithScopeId() throws SocketException {
  ImmutableSet<String> dottedQuadForms =
      ImmutableSet.of("7::0.128.0.127", "7::0.128.0.128", "7::128.128.0.127", "7::0.128.128.127");
  for (String ipString : dottedQuadForms) {
    for (String scope : getMachineScopesAndInterfaces()) {
      String candidate = ipString + "%" + scope;
      assertFalse(
          "InetAddresses.isInetAddress(" + candidate + ") should be false but was true",
          InetAddresses.isInetAddress(candidate));
    }
  }
}
  /**
   * IPv6 literals may carry a scope ID ("%1" or "%eth0"). Both numeric and named scopes must
   * parse, expose the right scope on the resulting {@link Inet6Address}, and survive a round trip
   * through {@code toAddrString}.
   */
  public void testIPv6AddressWithScopeId() throws SocketException, UnknownHostException {
    ImmutableSet<String> ipStrings =
        ImmutableSet.of(
            "::1",
            "1180::a",
            "1180::1",
            "1180::2",
            "1180::42",
            "1180::3dd0:7f8e:57b7:34d5",
            "1180::71a3:2b00:ddd3:753f",
            "1180::8b2:d61e:e5c:b333",
            "1180::b059:65f4:e877:c40",
            "fe80::34",
            "fec0::34");
    boolean processedNamedInterface = false;
    for (String ipString : ipStrings) {
      for (String scopeId : getMachineScopesAndInterfaces()) {
        String withScopeId = ipString + "%" + scopeId;
        assertTrue(
            "InetAddresses.isInetAddress(" + withScopeId + ") should be true but was false",
            InetAddresses.isInetAddress(withScopeId));
        Inet6Address parsed;
        boolean isNumeric = scopeId.matches("\\d+");
        try {
          parsed = (Inet6Address) InetAddresses.forString(withScopeId);
        } catch (IllegalArgumentException e) {
          if (!isNumeric) {
            // Android doesn't recognize %interface as valid
            continue;
          }
          throw e;
        }
        // Remember whether at least one named (non-numeric) scope was actually parsed.
        processedNamedInterface |= !isNumeric;
        assertThat(InetAddresses.toAddrString(parsed)).contains("%");
        if (isNumeric) {
          assertEquals(Integer.parseInt(scopeId), parsed.getScopeId());
        } else {
          assertEquals(scopeId, parsed.getScopedInterface().getName());
        }
        // Round trip: formatting then re-parsing must preserve both address and scope.
        Inet6Address reparsed =
            (Inet6Address) InetAddresses.forString(InetAddresses.toAddrString(parsed));
        assertEquals(reparsed, parsed);
        assertEquals(reparsed.getScopeId(), parsed.getScopeId());
      }
    }
    // Guard against the loop having silently skipped every named interface (e.g. on Android).
    assertTrue(processedNamedInterface);
  }
  /**
   * For every scoped IPv6 literal, {@code InetAddresses.forString} must agree exactly with the
   * platform's own {@code InetAddress.getByName}: same address and same scope ID.
   */
  public void testIPv6AddressWithScopeId_platformEquivalence()
      throws SocketException, UnknownHostException {
    ImmutableSet<String> ipStrings =
        ImmutableSet.of(
            "::1",
            "1180::a",
            "1180::1",
            "1180::2",
            "1180::42",
            "1180::3dd0:7f8e:57b7:34d5",
            "1180::71a3:2b00:ddd3:753f",
            "1180::8b2:d61e:e5c:b333",
            "1180::b059:65f4:e877:c40",
            "fe80::34",
            "fec0::34");
    for (String ipString : ipStrings) {
      for (String scopeId : getMachineScopesAndInterfaces()) {
        String withScopeId = ipString + "%" + scopeId;
        assertTrue(
            "InetAddresses.isInetAddress(" + withScopeId + ") should be true but was false",
            InetAddresses.isInetAddress(withScopeId));
        Inet6Address parsed;
        boolean isNumeric = scopeId.matches("\\d+");
        try {
          parsed = (Inet6Address) InetAddresses.forString(withScopeId);
        } catch (IllegalArgumentException e) {
          if (!isNumeric) {
            // Android doesn't recognize %interface as valid
            continue;
          }
          throw e;
        }
        // Ask the platform parser for its interpretation of the same literal.
        Inet6Address platformValue;
        try {
          platformValue = (Inet6Address) InetAddress.getByName(withScopeId);
        } catch (UnknownHostException e) {
          // Android doesn't recognize %interface as valid
          if (!isNumeric) {
            continue;
          }
          throw e;
        }
        assertEquals(platformValue, parsed);
        assertEquals(platformValue.getScopeId(), parsed.getScopeId());
      }
    }
  }
public void testIPv6AddressWithBadScopeId() throws SocketException, UnknownHostException {
assertThrows(
IllegalArgumentException.class,
() -> InetAddresses.forString("1180::b059:65f4:e877:c40%eth9"));
}
public void testToAddrStringIPv4() {
// Don't need to test IPv4 much; it just calls getHostAddress().
assertEquals("1.2.3.4", InetAddresses.toAddrString(InetAddresses.forString("1.2.3.4")));
}
  /**
   * toAddrString compresses exactly one run of zero groups with "::" — the longest run, with ties
   * going to the leftmost — and emits lowercase hex without leading zeros.
   */
  public void testToAddrStringIPv6() {
    assertEquals(
        "1:2:3:4:5:6:7:8", InetAddresses.toAddrString(InetAddresses.forString("1:2:3:4:5:6:7:8")));
    assertEquals(
        "2001:0:0:4::8", InetAddresses.toAddrString(InetAddresses.forString("2001:0:0:4:0:0:0:8")));
    assertEquals(
        "2001::4:5:6:7:8",
        InetAddresses.toAddrString(InetAddresses.forString("2001:0:0:4:5:6:7:8")));
    assertEquals(
        "2001:0:3:4:5:6:7:8",
        InetAddresses.toAddrString(InetAddresses.forString("2001:0:3:4:5:6:7:8")));
    assertEquals(
        "0:0:3::ffff", InetAddresses.toAddrString(InetAddresses.forString("0:0:3:0:0:0:0:ffff")));
    assertEquals(
        "::4:0:0:0:ffff",
        InetAddresses.toAddrString(InetAddresses.forString("0:0:0:4:0:0:0:ffff")));
    assertEquals(
        "::5:0:0:ffff", InetAddresses.toAddrString(InetAddresses.forString("0:0:0:0:5:0:0:ffff")));
    // Two equal-length zero runs: the leftmost one is the one that gets collapsed.
    assertEquals(
        "1::4:0:0:7:8", InetAddresses.toAddrString(InetAddresses.forString("1:0:0:4:0:0:7:8")));
    assertEquals("::", InetAddresses.toAddrString(InetAddresses.forString("0:0:0:0:0:0:0:0")));
    assertEquals("::1", InetAddresses.toAddrString(InetAddresses.forString("0:0:0:0:0:0:0:1")));
    assertEquals(
        "2001:658:22a:cafe::",
        InetAddresses.toAddrString(InetAddresses.forString("2001:0658:022a:cafe::")));
    // An embedded dotted quad is reformatted as plain hex groups.
    assertEquals("::102:304", InetAddresses.toAddrString(InetAddresses.forString("::1.2.3.4")));
  }
public void testToUriStringIPv4() {
String ipStr = "1.2.3.4";
InetAddress ip = InetAddresses.forString(ipStr);
assertEquals("1.2.3.4", InetAddresses.toUriString(ip));
}
public void testToUriStringIPv6() {
// Unfortunately the InetAddress.toString() method for IPv6 addresses
// does not collapse contiguous shorts of zeroes with the :: abbreviation.
String ipStr = "3ffe::1";
InetAddress ip = InetAddresses.forString(ipStr);
assertEquals("[3ffe::1]", InetAddresses.toUriString(ip));
}
public void testForUriStringIPv4() {
Inet4Address expected = (Inet4Address) InetAddresses.forString("192.168.1.1");
assertEquals(expected, InetAddresses.forUriString("192.168.1.1"));
}
public void testForUriStringIPv6() {
Inet6Address expected = (Inet6Address) InetAddresses.forString("3ffe:0:0:0:0:0:0:1");
assertEquals(expected, InetAddresses.forUriString("[3ffe:0:0:0:0:0:0:1]"));
}
public void testForUriStringIPv4Mapped() {
Inet4Address expected = (Inet4Address) InetAddresses.forString("192.0.2.1");
assertEquals(expected, InetAddresses.forUriString("[::ffff:192.0.2.1]"));
}
  /** isUriInetAddress accepts bare IPv4 and bracketed IPv6 URI literals, and nothing else. */
  public void testIsUriInetAddress() {
    // Valid URI forms.
    assertTrue(InetAddresses.isUriInetAddress("192.168.1.1"));
    assertTrue(InetAddresses.isUriInetAddress("[3ffe:0:0:0:0:0:0:1]"));
    assertTrue(InetAddresses.isUriInetAddress("[::ffff:192.0.2.1]"));
    // Mismatched brackets, bad octets, hostnames, and unbracketed IPv6 all fail.
    assertFalse(InetAddresses.isUriInetAddress("[192.168.1.1"));
    assertFalse(InetAddresses.isUriInetAddress("192.168.1.1]"));
    assertFalse(InetAddresses.isUriInetAddress(""));
    assertFalse(InetAddresses.isUriInetAddress("192.168.999.888"));
    assertFalse(InetAddresses.isUriInetAddress("www.google.com"));
    assertFalse(InetAddresses.isUriInetAddress("1:2e"));
    assertFalse(InetAddresses.isUriInetAddress("[3ffe:0:0:0:0:0:0:1"));
    assertFalse(InetAddresses.isUriInetAddress("3ffe:0:0:0:0:0:0:1]"));
    assertFalse(InetAddresses.isUriInetAddress("3ffe:0:0:0:0:0:0:1"));
    assertFalse(InetAddresses.isUriInetAddress("::ffff:192.0.2.1"));
  }
  /** forUriString throws IllegalArgumentException for every malformed or mis-bracketed input. */
  public void testForUriStringBad() {
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString(""));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("192.168.999.888"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("www.google.com"));
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("[1:2e]"));
    // Brackets are only legal around IPv6 literals — a bracketed IPv4 address is rejected.
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("[192.168.1.1]"));
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("192.168.1.1]"));
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.forUriString("[192.168.1.1"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("[3ffe:0:0:0:0:0:0:1"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("3ffe:0:0:0:0:0:0:1]"));
    // ...and IPv6 without brackets is rejected too.
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("3ffe:0:0:0:0:0:0:1"));
    assertThrows(
        IllegalArgumentException.class, () -> InetAddresses.forUriString("::ffff:192.0.2.1"));
  }
  /**
   * "Compat" IPv4 addresses put an IPv4 address in the low 32 bits under an all-zero prefix
   * ("::1.2.3.4" or equivalently "::102:304"). getCompatIPv4Address must extract the embedded
   * address and reject anything that is not compat.
   */
  public void testCompatIPv4Addresses() {
    ImmutableSet<String> nonCompatAddresses = ImmutableSet.of("3ffe::1", "::", "::1");
    for (String nonCompatAddress : nonCompatAddresses) {
      InetAddress ip = InetAddresses.forString(nonCompatAddress);
      assertFalse(InetAddresses.isCompatIPv4Address((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + nonCompatAddress + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.getCompatIPv4Address((Inet6Address) ip));
    }
    // Both the dotted-quad and pure-hex spellings embed the same IPv4 address.
    ImmutableSet<String> validCompatAddresses = ImmutableSet.of("::1.2.3.4", "::102:304");
    String compatStr = "1.2.3.4";
    InetAddress compat = InetAddresses.forString(compatStr);
    for (String validCompatAddress : validCompatAddresses) {
      InetAddress ip = InetAddresses.forString(validCompatAddress);
      assertTrue("checking '" + validCompatAddress + "'", ip instanceof Inet6Address);
      assertTrue(
          "checking '" + validCompatAddress + "'",
          InetAddresses.isCompatIPv4Address((Inet6Address) ip));
      assertEquals(
          "checking '" + validCompatAddress + "'",
          compat,
          InetAddresses.getCompatIPv4Address((Inet6Address) ip));
    }
  }
public void testMappedIPv4Addresses() throws UnknownHostException {
/*
* Verify that it is not possible to instantiate an Inet6Address
* from an "IPv4 mapped" IPv6 address. Our String-based method can
* at least identify them, however.
*/
String mappedStr = "::ffff:192.168.0.1";
assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
InetAddress mapped = InetAddresses.forString(mappedStr);
assertThat(mapped).isNotInstanceOf(Inet6Address.class);
assertEquals(InetAddress.getByName("192.168.0.1"), mapped);
// check upper case
mappedStr = "::FFFF:192.168.0.1";
assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
mapped = InetAddresses.forString(mappedStr);
assertThat(mapped).isNotInstanceOf(Inet6Address.class);
assertEquals(InetAddress.getByName("192.168.0.1"), mapped);
mappedStr = "0:00:000:0000:0:ffff:1.2.3.4";
assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
mapped = InetAddresses.forString(mappedStr);
assertThat(mapped).isNotInstanceOf(Inet6Address.class);
assertEquals(InetAddress.getByName("1.2.3.4"), mapped);
mappedStr = "::ffff:0102:0304";
assertTrue(InetAddresses.isMappedIPv4Address(mappedStr));
mapped = InetAddresses.forString(mappedStr);
assertThat(mapped).isNotInstanceOf(Inet6Address.class);
assertEquals(InetAddress.getByName("1.2.3.4"), mapped);
assertFalse(InetAddresses.isMappedIPv4Address("::"));
assertFalse(InetAddresses.isMappedIPv4Address("::ffff"));
assertFalse(InetAddresses.isMappedIPv4Address("::ffff:0"));
assertFalse(InetAddresses.isMappedIPv4Address("::fffe:0:0"));
assertFalse(InetAddresses.isMappedIPv4Address("::1:ffff:0:0"));
assertFalse(InetAddresses.isMappedIPv4Address("foo"));
assertFalse(InetAddresses.isMappedIPv4Address("192.0.2.1"));
}
  /**
   * 6to4 addresses start with the 2002: prefix and embed the client IPv4 address in the next 32
   * bits (here 0102:0304 == 1.2.3.4); get6to4IPv4Address must reject anything else.
   */
  public void test6to4Addresses() {
    ImmutableSet<String> non6to4Addresses = ImmutableSet.of("::1.2.3.4", "3ffe::1", "::", "::1");
    for (String non6to4Address : non6to4Addresses) {
      InetAddress ip = InetAddresses.forString(non6to4Address);
      assertFalse(InetAddresses.is6to4Address((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + non6to4Address + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.get6to4IPv4Address((Inet6Address) ip));
    }
    String valid6to4Address = "2002:0102:0304::1";
    String ipv4Str = "1.2.3.4";
    InetAddress ipv4 = InetAddresses.forString(ipv4Str);
    InetAddress ip = InetAddresses.forString(valid6to4Address);
    assertTrue(InetAddresses.is6to4Address((Inet6Address) ip));
    assertEquals(ipv4, InetAddresses.get6to4IPv4Address((Inet6Address) ip));
  }
  /**
   * Teredo addresses encode the server IPv4 address, flags, and — stored bit-inverted — the
   * client's UDP port and IPv4 address; getTeredoInfo must decode all four fields.
   */
  public void testTeredoAddresses() {
    ImmutableSet<String> nonTeredoAddresses = ImmutableSet.of("::1.2.3.4", "3ffe::1", "::", "::1");
    for (String nonTeredoAddress : nonTeredoAddresses) {
      InetAddress ip = InetAddresses.forString(nonTeredoAddress);
      assertFalse(InetAddresses.isTeredoAddress((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + nonTeredoAddress + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.getTeredoInfo((Inet6Address) ip));
    }
    String validTeredoAddress = "2001:0000:4136:e378:8000:63bf:3fff:fdd2";
    String serverStr = "65.54.227.120"; // = 0x4136e378, bits 32..63 of the address
    String clientStr = "192.0.2.45"; // = ~0x3ffffdd2, the inverted low 32 bits
    int port = 40000; // = (~0x63bf) & 0xffff
    int flags = 0x8000;
    InetAddress ip = InetAddresses.forString(validTeredoAddress);
    assertTrue(InetAddresses.isTeredoAddress((Inet6Address) ip));
    InetAddresses.TeredoInfo teredo = InetAddresses.getTeredoInfo((Inet6Address) ip);
    InetAddress server = InetAddresses.forString(serverStr);
    assertEquals(server, teredo.getServer());
    InetAddress client = InetAddresses.forString(clientStr);
    assertEquals(client, teredo.getClient());
    assertEquals(port, teredo.getPort());
    assertEquals(flags, teredo.getFlags());
  }
public void testTeredoAddress_nullServer() {
InetAddresses.TeredoInfo info = new InetAddresses.TeredoInfo(null, null, 80, 1000);
assertEquals(InetAddresses.forString("0.0.0.0"), info.getServer());
assertEquals(InetAddresses.forString("0.0.0.0"), info.getClient());
assertEquals(80, info.getPort());
assertEquals(1000, info.getFlags());
}
  /**
   * ISATAP addresses carry the interface identifier ...:XX00:5efe:a.b.c.d where XX00 is 0000,
   * 0100, 0200, or 0300; getIsatapIPv4Address extracts the embedded IPv4 address and rejects
   * everything else (including Teredo addresses that merely contain a 5efe-like group).
   */
  public void testIsatapAddresses() {
    InetAddress ipv4 = InetAddresses.forString("1.2.3.4");
    ImmutableSet<String> validIsatapAddresses =
        ImmutableSet.of(
            "2001:db8::5efe:102:304",
            "2001:db8::100:5efe:102:304", // Private Multicast? Not likely.
            "2001:db8::200:5efe:102:304",
            "2001:db8::300:5efe:102:304" // Public Multicast? Also unlikely.
            );
    ImmutableSet<String> nonIsatapAddresses =
        ImmutableSet.of(
            "::1.2.3.4",
            "3ffe::1",
            "::",
            "::1",
            "2001:db8::0040:5efe:102:304",
            "2001:db8::5ffe:102:304",
            "2001:db8::5eff:102:304",
            "2001:0:102:203:200:5efe:506:708" // Teredo address; not ISATAP
            );
    for (String validIsatapAddress : validIsatapAddresses) {
      InetAddress ip = InetAddresses.forString(validIsatapAddress);
      assertTrue(InetAddresses.isIsatapAddress((Inet6Address) ip));
      assertEquals(
          "checking '" + validIsatapAddress + "'",
          ipv4,
          InetAddresses.getIsatapIPv4Address((Inet6Address) ip));
    }
    for (String nonIsatapAddress : nonIsatapAddresses) {
      InetAddress ip = InetAddresses.forString(nonIsatapAddress);
      assertFalse(InetAddresses.isIsatapAddress((Inet6Address) ip));
      assertThrows(
          "IllegalArgumentException expected for '" + nonIsatapAddress + "'",
          IllegalArgumentException.class,
          () -> InetAddresses.getIsatapIPv4Address((Inet6Address) ip));
    }
  }
  /**
   * hasEmbeddedIPv4ClientAddress is true for compat, 6to4, and Teredo addresses — but explicitly
   * NOT for plain unicast or ISATAP — and getEmbeddedIPv4ClientAddress extracts the client IPv4
   * address in each positive case.
   */
  public void testGetEmbeddedIPv4ClientAddress() {
    Inet6Address testIp;
    // Test regular global unicast address.
    testIp = (Inet6Address) InetAddresses.forString("2001:db8::1");
    assertFalse(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    // Test ISATAP address.
    testIp = (Inet6Address) InetAddresses.forString("2001:db8::5efe:102:304");
    assertFalse(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    // Test compat address.
    testIp = (Inet6Address) InetAddresses.forString("::1.2.3.4");
    assertTrue(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    InetAddress ipv4 = InetAddresses.forString("1.2.3.4");
    assertEquals(ipv4, InetAddresses.getEmbeddedIPv4ClientAddress(testIp));
    // Test 6to4 address.
    testIp = (Inet6Address) InetAddresses.forString("2002:0102:0304::1");
    assertTrue(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    ipv4 = InetAddresses.forString("1.2.3.4");
    assertEquals(ipv4, InetAddresses.getEmbeddedIPv4ClientAddress(testIp));
    // Test Teredo address.
    testIp = (Inet6Address) InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2");
    assertTrue(InetAddresses.hasEmbeddedIPv4ClientAddress(testIp));
    ipv4 = InetAddresses.forString("192.0.2.45");
    assertEquals(ipv4, InetAddresses.getEmbeddedIPv4ClientAddress(testIp));
  }
  /**
   * getCoercedIPv4Address maps any address into IPv4 space: IPv4 input and the special cases ::
   * and ::1 map to fixed values, while other IPv6 addresses are hashed into 224.0.0.0/3, keyed
   * only on any embedded IPv4 client address (so addresses sharing a client hash identically).
   */
  public void testGetCoercedIPv4Address() {
    // Check that a coerced IPv4 address is unaltered.
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("127.0.0.1")))
        .isEqualTo(InetAddresses.forString("127.0.0.1"));
    // ::1 special case
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("::1")))
        .isEqualTo(InetAddresses.forString("127.0.0.1"));
    // :: special case
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("::")))
        .isEqualTo(InetAddresses.forString("0.0.0.0"));
    // test compat address (should be hashed)
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("::1.2.3.4")))
        .isNotEqualTo(InetAddresses.forString("1.2.3.4"));
    // test 6to4 address (should be hashed)
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::1")))
        .isNotEqualTo(InetAddresses.forString("1.2.3.4"));
    // 2 6to4 addresses differing in the embedded IPv4 address should
    // hash to the different values.
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::1")))
        .isNotEqualTo(
            InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0506:0708::1")));
    // 2 6to4 addresses NOT differing in the embedded IPv4 address should
    // hash to the same value.
    assertThat(InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::1")))
        .isEqualTo(
            InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2002:0102:0304::2")));
    // test Teredo address (should be hashed)
    assertThat(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2")))
        .isNotEqualTo(InetAddresses.forString("192.0.2.45"));
    // 2 Teredo addresses differing in their embedded IPv4 addresses should hash to different
    // values.
    assertThat(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2")))
        .isNotEqualTo(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd3")));
    // 2 Teredo addresses NOT differing in their embedded IPv4 addresses should hash to the same
    // value.
    assertThat(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:4136:e378:8000:63bf:3fff:fdd2")))
        .isEqualTo(
            InetAddresses.getCoercedIPv4Address(
                InetAddresses.forString("2001:0000:5136:f378:9000:73bf:3fff:fdd2")));
    // Test that an address hashes in to the 224.0.0.0/3 number-space.
    int coercedInt =
        InetAddresses.coerceToInteger(
            InetAddresses.getCoercedIPv4Address(InetAddresses.forString("2001:4860::1")));
    assertThat(coercedInt).isAtLeast(0xe0000000);
    assertThat(coercedInt).isAtMost(0xfffffffe);
  }
public void testCoerceToInteger() {
assertThat(InetAddresses.coerceToInteger(InetAddresses.forString("127.0.0.1")))
.isEqualTo(0x7f000001);
}
public void testFromInteger() {
assertThat(InetAddresses.fromInteger(0x7f000001))
.isEqualTo(InetAddresses.forString("127.0.0.1"));
}
  /**
   * fromLittleEndianByteArray reverses the byte order for both 4-byte (IPv4) and 16-byte (IPv6)
   * arrays, and rejects any other length with UnknownHostException.
   */
  public void testFromLittleEndianByteArray() throws UnknownHostException {
    assertEquals(
        InetAddresses.fromLittleEndianByteArray(new byte[] {1, 2, 3, 4}),
        InetAddress.getByAddress(new byte[] {4, 3, 2, 1}));
    assertEquals(
        InetAddresses.fromLittleEndianByteArray(
            new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}),
        InetAddress.getByAddress(
            new byte[] {16, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1}));
    // 3 bytes is neither a valid IPv4 nor IPv6 length.
    assertThrows(
        UnknownHostException.class, () -> InetAddresses.fromLittleEndianByteArray(new byte[3]));
  }
public void testIsMaximum() throws UnknownHostException {
InetAddress address = InetAddress.getByName("255.255.255.254");
assertFalse(InetAddresses.isMaximum(address));
address = InetAddress.getByName("255.255.255.255");
assertTrue(InetAddresses.isMaximum(address));
address = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe");
assertFalse(InetAddresses.isMaximum(address));
address = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
assertTrue(InetAddresses.isMaximum(address));
}
  @SuppressWarnings("IdentifierName") // the best we could do for adjacent digit blocks
  // Incrementing 172.24.66.0 255 times lands on .66.255; one more crosses into 172.24.67.0.
  // Incrementing the all-ones address must throw rather than wrap.
  public void testIncrementIPv4() throws UnknownHostException {
    InetAddress address_66_0 = InetAddress.getByName("172.24.66.0");
    InetAddress address_66_255 = InetAddress.getByName("172.24.66.255");
    InetAddress address_67_0 = InetAddress.getByName("172.24.67.0");
    InetAddress address = address_66_0;
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.increment(address);
    }
    assertEquals(address_66_255, address);
    address = InetAddresses.increment(address);
    assertEquals(address_67_0, address);
    InetAddress address_ffffff = InetAddress.getByName("255.255.255.255");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.increment(address_ffffff));
  }
  @SuppressWarnings("IdentifierName") // the best we could do for adjacent digit blocks
  // Same walk as testIncrementIPv4, but across an IPv6 low-byte boundary (..66ff -> ..6700).
  // Incrementing the all-ones IPv6 address must throw rather than wrap.
  public void testIncrementIPv6() throws UnknownHostException {
    InetAddress addressV6_66_0 = InetAddress.getByName("2001:db8::6600");
    InetAddress addressV6_66_ff = InetAddress.getByName("2001:db8::66ff");
    InetAddress addressV6_67_0 = InetAddress.getByName("2001:db8::6700");
    InetAddress address = addressV6_66_0;
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.increment(address);
    }
    assertEquals(addressV6_66_ff, address);
    address = InetAddresses.increment(address);
    assertEquals(addressV6_67_0, address);
    InetAddress addressV6_ffffff = InetAddress.getByName("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.increment(addressV6_ffffff));
  }
  /**
   * decrement walks backwards across an octet boundary (.67.0 -> .66.255 -> ... -> .66.0) and
   * refuses to decrement 0.0.0.0.
   */
  public void testDecrementIPv4() throws UnknownHostException {
    InetAddress address660 = InetAddress.getByName("172.24.66.0");
    InetAddress address66255 = InetAddress.getByName("172.24.66.255");
    InetAddress address670 = InetAddress.getByName("172.24.67.0");
    InetAddress address = address670;
    address = InetAddresses.decrement(address);
    assertEquals(address66255, address);
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.decrement(address);
    }
    assertEquals(address660, address);
    InetAddress address0000 = InetAddress.getByName("0.0.0.0");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.decrement(address0000));
  }
  /**
   * Same backward walk as testDecrementIPv4, but over the low byte of an IPv6 address; the
   * all-zeros IPv6 address cannot be decremented.
   */
  public void testDecrementIPv6() throws UnknownHostException {
    InetAddress addressV6660 = InetAddress.getByName("2001:db8::6600");
    InetAddress addressV666ff = InetAddress.getByName("2001:db8::66ff");
    InetAddress addressV6670 = InetAddress.getByName("2001:db8::6700");
    InetAddress address = addressV6670;
    address = InetAddresses.decrement(address);
    assertEquals(addressV666ff, address);
    for (int i = 0; i < 255; i++) {
      address = InetAddresses.decrement(address);
    }
    assertEquals(addressV6660, address);
    InetAddress addressV6000000 = InetAddress.getByName("0:0:0:0:0:0:0:0");
    assertThrows(IllegalArgumentException.class, () -> InetAddresses.decrement(addressV6000000));
  }
public void testFromIpv4BigIntegerThrowsLessThanZero() {
IllegalArgumentException expected =
assertThrows(
IllegalArgumentException.class,
() -> InetAddresses.fromIPv4BigInteger(BigInteger.valueOf(-1L)));
assertThat(expected)
.hasMessageThat()
.isEqualTo("BigInteger must be greater than or equal to 0");
}
public void testFromIpv6BigIntegerThrowsLessThanZero() {
IllegalArgumentException expected =
assertThrows(
IllegalArgumentException.class,
() -> InetAddresses.fromIPv6BigInteger(BigInteger.valueOf(-1L)));
assertThat(expected)
.hasMessageThat()
.isEqualTo("BigInteger must be greater than or equal to 0");
}
  /** Round-trips representative IPv4 addresses: zero, one, and the 32-bit extremes. */
  public void testFromIpv4BigIntegerValid() {
    checkBigIntegerConversion("0.0.0.0", BigInteger.ZERO);
    checkBigIntegerConversion("0.0.0.1", BigInteger.ONE);
    // 2^31 - 1
    checkBigIntegerConversion("127.255.255.255", BigInteger.valueOf(Integer.MAX_VALUE));
    // 2^32 - 2
    checkBigIntegerConversion(
        "255.255.255.254", BigInteger.valueOf(Integer.MAX_VALUE).multiply(BigInteger.valueOf(2)));
    // 2^32 - 1 (all-ones)
    checkBigIntegerConversion(
        "255.255.255.255", BigInteger.ONE.shiftLeft(32).subtract(BigInteger.ONE));
  }
  /** Round-trips representative IPv6 addresses up to the full 128-bit all-ones value. */
  public void testFromIpv6BigIntegerValid() {
    checkBigIntegerConversion("::", BigInteger.ZERO);
    checkBigIntegerConversion("::1", BigInteger.ONE);
    checkBigIntegerConversion("::7fff:ffff", BigInteger.valueOf(Integer.MAX_VALUE));
    checkBigIntegerConversion("::7fff:ffff:ffff:ffff", BigInteger.valueOf(Long.MAX_VALUE));
    checkBigIntegerConversion(
        "::ffff:ffff:ffff:ffff", BigInteger.ONE.shiftLeft(64).subtract(BigInteger.ONE));
    checkBigIntegerConversion(
        "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff",
        BigInteger.ONE.shiftLeft(128).subtract(BigInteger.ONE));
  }
  /** fromIPv4BigInteger rejects values wider than 4 bytes, naming the offending value. */
  public void testFromIpv4BigIntegerInputTooLarge() {
    IllegalArgumentException expected =
        assertThrows(
            IllegalArgumentException.class,
            () ->
                InetAddresses.fromIPv4BigInteger(BigInteger.ONE.shiftLeft(32).add(BigInteger.ONE)));
    assertThat(expected)
        .hasMessageThat()
        .isEqualTo(
            "BigInteger cannot be converted to InetAddress because it has more than 4 bytes:"
                + " 4294967297");
  }
  /** fromIPv6BigInteger rejects values wider than 16 bytes, naming the offending value. */
  public void testFromIpv6BigIntegerInputTooLarge() {
    IllegalArgumentException expected =
        assertThrows(
            IllegalArgumentException.class,
            () ->
                InetAddresses.fromIPv6BigInteger(
                    BigInteger.ONE.shiftLeft(128).add(BigInteger.ONE)));
    assertThat(expected)
        .hasMessageThat()
        .isEqualTo(
            "BigInteger cannot be converted to InetAddress because it has more than 16 bytes:"
                + " 340282366920938463463374607431768211457");
  }
  // Enumerates scope identifiers valid on this machine: each interface's name and its numeric
  // index. Asserts at least one interface exists so the scope-ID tests cannot pass vacuously.
  // see https://github.com/google/guava/issues/2587
  private static ImmutableSet<String> getMachineScopesAndInterfaces() throws SocketException {
    ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
    assertTrue(interfaces.hasMoreElements());
    while (interfaces.hasMoreElements()) {
      NetworkInterface i = interfaces.nextElement();
      builder.add(i.getName()).add(String.valueOf(i.getIndex()));
    }
    return builder.build();
  }
/** Checks that the IP converts to the big integer and the big integer converts to the IP. */
private static void checkBigIntegerConversion(String ip, BigInteger bigIntegerIp) {
InetAddress address = InetAddresses.forString(ip);
boolean isIpv6 = address instanceof Inet6Address;
assertEquals(bigIntegerIp, InetAddresses.toBigInteger(address));
assertEquals(
address,
isIpv6
? InetAddresses.fromIPv6BigInteger(bigIntegerIp)
: InetAddresses.fromIPv4BigInteger(bigIntegerIp));
}
}
|
googleapis/google-cloud-java | 37,105 | java-retail/google-cloud-retail/src/main/java/com/google/cloud/retail/v2alpha/MerchantCenterAccountLinkServiceClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.retail.v2alpha;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.httpjson.longrunning.OperationsClient;
import com.google.api.gax.longrunning.OperationFuture;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.retail.v2alpha.stub.MerchantCenterAccountLinkServiceStub;
import com.google.cloud.retail.v2alpha.stub.MerchantCenterAccountLinkServiceStubSettings;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: Merchant Center Link service to link a Branch to a Merchant Center Account.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
* MerchantCenterAccountLinkServiceClient.create()) {
* CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
* ListMerchantCenterAccountLinksResponse response =
* merchantCenterAccountLinkServiceClient.listMerchantCenterAccountLinks(parent);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the MerchantCenterAccountLinkServiceClient object to clean
* up resources such as threads. In the example above, try-with-resources is used, which
* automatically calls close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> ListMerchantCenterAccountLinks</td>
* <td><p> Lists all [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink]s under the specified parent [Catalog][google.cloud.retail.v2alpha.Catalog].</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listMerchantCenterAccountLinks(ListMerchantCenterAccountLinksRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> listMerchantCenterAccountLinks(CatalogName parent)
* <li><p> listMerchantCenterAccountLinks(String parent)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listMerchantCenterAccountLinksCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> CreateMerchantCenterAccountLink</td>
* <td><p> Creates a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> createMerchantCenterAccountLinkAsync(CreateMerchantCenterAccountLinkRequest request)
* </ul>
* <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p>
* <ul>
* <li><p> createMerchantCenterAccountLinkAsync(CatalogName parent, MerchantCenterAccountLink merchantCenterAccountLink)
* <li><p> createMerchantCenterAccountLinkAsync(String parent, MerchantCenterAccountLink merchantCenterAccountLink)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> createMerchantCenterAccountLinkOperationCallable()
* <li><p> createMerchantCenterAccountLinkCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> DeleteMerchantCenterAccountLink</td>
* <td><p> Deletes a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink]. If the [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] to delete does not exist, a NOT_FOUND error is returned.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> deleteMerchantCenterAccountLink(DeleteMerchantCenterAccountLinkRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> deleteMerchantCenterAccountLink(MerchantCenterAccountLinkName name)
* <li><p> deleteMerchantCenterAccountLink(String name)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> deleteMerchantCenterAccountLinkCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of
* MerchantCenterAccountLinkServiceSettings to create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* MerchantCenterAccountLinkServiceSettings merchantCenterAccountLinkServiceSettings =
* MerchantCenterAccountLinkServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
* MerchantCenterAccountLinkServiceClient.create(merchantCenterAccountLinkServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* MerchantCenterAccountLinkServiceSettings merchantCenterAccountLinkServiceSettings =
* MerchantCenterAccountLinkServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
* MerchantCenterAccountLinkServiceClient.create(merchantCenterAccountLinkServiceSettings);
* }</pre>
*
* <p>To use REST (HTTP1.1/JSON) transport (instead of gRPC) for sending and receiving requests over
* the wire:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* MerchantCenterAccountLinkServiceSettings merchantCenterAccountLinkServiceSettings =
* MerchantCenterAccountLinkServiceSettings.newHttpJsonBuilder().build();
* MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
* MerchantCenterAccountLinkServiceClient.create(merchantCenterAccountLinkServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class MerchantCenterAccountLinkServiceClient implements BackgroundResource {
  // Client-level settings used to create the stub. Null when this client was
  // constructed directly from a pre-built stub (advanced usage path).
  private final MerchantCenterAccountLinkServiceSettings settings;
  // Transport-specific stub that performs the actual RPCs; all public methods delegate to it.
  private final MerchantCenterAccountLinkServiceStub stub;
  // Operations client for polling long-running operations over the HTTP/JSON (REST) transport.
  private final OperationsClient httpJsonOperationsClient;
  // Operations client for polling long-running operations over the gRPC transport.
  private final com.google.longrunning.OperationsClient operationsClient;
  /** Constructs an instance of MerchantCenterAccountLinkServiceClient with default settings. */
  public static final MerchantCenterAccountLinkServiceClient create() throws IOException {
    return create(MerchantCenterAccountLinkServiceSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of MerchantCenterAccountLinkServiceClient, using the given settings. The
   * channels are created based on the settings passed in, or defaults for any settings that are not
   * set.
   */
  public static final MerchantCenterAccountLinkServiceClient create(
      MerchantCenterAccountLinkServiceSettings settings) throws IOException {
    return new MerchantCenterAccountLinkServiceClient(settings);
  }
  /**
   * Constructs an instance of MerchantCenterAccountLinkServiceClient, using the given stub for
   * making calls. This is for advanced usage - prefer using
   * create(MerchantCenterAccountLinkServiceSettings).
   */
  public static final MerchantCenterAccountLinkServiceClient create(
      MerchantCenterAccountLinkServiceStub stub) {
    return new MerchantCenterAccountLinkServiceClient(stub);
  }
  /**
   * Constructs an instance of MerchantCenterAccountLinkServiceClient, using the given settings.
   * This is protected so that it is easy to make a subclass, but otherwise, the static factory
   * methods should be preferred.
   */
  protected MerchantCenterAccountLinkServiceClient(
      MerchantCenterAccountLinkServiceSettings settings) throws IOException {
    this.settings = settings;
    this.stub =
        ((MerchantCenterAccountLinkServiceStubSettings) settings.getStubSettings()).createStub();
    // Both operations clients are built from the same stub so that long-running operations can be
    // polled regardless of which transport (gRPC or HTTP/JSON) this client was configured with.
    this.operationsClient =
        com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
    this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
  }
  // Stub-based constructor: no settings are available on this path, so getSettings() returns null.
  protected MerchantCenterAccountLinkServiceClient(MerchantCenterAccountLinkServiceStub stub) {
    this.settings = null;
    this.stub = stub;
    this.operationsClient =
        com.google.longrunning.OperationsClient.create(this.stub.getOperationsStub());
    this.httpJsonOperationsClient = OperationsClient.create(this.stub.getHttpJsonOperationsStub());
  }
  // May return null when the client was created from a stub rather than from settings.
  public final MerchantCenterAccountLinkServiceSettings getSettings() {
    return settings;
  }
  // Returns the transport-specific stub backing this client (advanced usage).
  public MerchantCenterAccountLinkServiceStub getStub() {
    return stub;
  }
  /**
   * Returns the OperationsClient that can be used to query the status of a long-running operation
   * returned by another API method call.
   */
  public final com.google.longrunning.OperationsClient getOperationsClient() {
    return operationsClient;
  }
  /**
   * Returns the OperationsClient that can be used to query the status of a long-running operation
   * returned by another API method call.
   */
  @BetaApi
  public final OperationsClient getHttpJsonOperationsClient() {
    return httpJsonOperationsClient;
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists all [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink]s
   * under the specified parent [Catalog][google.cloud.retail.v2alpha.Catalog].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
   *   ListMerchantCenterAccountLinksResponse response =
   *       merchantCenterAccountLinkServiceClient.listMerchantCenterAccountLinks(parent);
   * }
   * }</pre>
   *
   * @param parent Required. The parent Catalog of the resource. It must match this format:
   *     `projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListMerchantCenterAccountLinksResponse listMerchantCenterAccountLinks(
      CatalogName parent) {
    // Typed-resource-name overload: converts the name to its string form and delegates to the
    // request-object overload.
    ListMerchantCenterAccountLinksRequest request =
        ListMerchantCenterAccountLinksRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .build();
    return listMerchantCenterAccountLinks(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists all [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink]s
   * under the specified parent [Catalog][google.cloud.retail.v2alpha.Catalog].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   String parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
   *   ListMerchantCenterAccountLinksResponse response =
   *       merchantCenterAccountLinkServiceClient.listMerchantCenterAccountLinks(parent);
   * }
   * }</pre>
   *
   * @param parent Required. The parent Catalog of the resource. It must match this format:
   *     `projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListMerchantCenterAccountLinksResponse listMerchantCenterAccountLinks(
      String parent) {
    // String overload: wraps the raw resource name and delegates to the request-object overload.
    ListMerchantCenterAccountLinksRequest request =
        ListMerchantCenterAccountLinksRequest.newBuilder().setParent(parent).build();
    return listMerchantCenterAccountLinks(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists all [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink]s
   * under the specified parent [Catalog][google.cloud.retail.v2alpha.Catalog].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   ListMerchantCenterAccountLinksRequest request =
   *       ListMerchantCenterAccountLinksRequest.newBuilder()
   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
   *           .build();
   *   ListMerchantCenterAccountLinksResponse response =
   *       merchantCenterAccountLinkServiceClient.listMerchantCenterAccountLinks(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final ListMerchantCenterAccountLinksResponse listMerchantCenterAccountLinks(
      ListMerchantCenterAccountLinksRequest request) {
    // Blocking call: issues the RPC through the stub's callable and waits for the response.
    return listMerchantCenterAccountLinksCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Lists all [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink]s
   * under the specified parent [Catalog][google.cloud.retail.v2alpha.Catalog].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   ListMerchantCenterAccountLinksRequest request =
   *       ListMerchantCenterAccountLinksRequest.newBuilder()
   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
   *           .build();
   *   ApiFuture<ListMerchantCenterAccountLinksResponse> future =
   *       merchantCenterAccountLinkServiceClient
   *           .listMerchantCenterAccountLinksCallable()
   *           .futureCall(request);
   *   // Do something.
   *   ListMerchantCenterAccountLinksResponse response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<
          ListMerchantCenterAccountLinksRequest, ListMerchantCenterAccountLinksResponse>
      listMerchantCenterAccountLinksCallable() {
    return stub.listMerchantCenterAccountLinksCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   CatalogName parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]");
   *   MerchantCenterAccountLink merchantCenterAccountLink =
   *       MerchantCenterAccountLink.newBuilder().build();
   *   MerchantCenterAccountLink response =
   *       merchantCenterAccountLinkServiceClient
   *           .createMerchantCenterAccountLinkAsync(parent, merchantCenterAccountLink)
   *           .get();
   * }
   * }</pre>
   *
   * @param parent Required. The branch resource where this MerchantCenterAccountLink will be
   *     created. Format: `projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`
   * @param merchantCenterAccountLink Required. The
   *     [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] to
   *     create.
   *     <p>If the caller does not have permission to create the
   *     [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink],
   *     regardless of whether or not it exists, a PERMISSION_DENIED error is returned.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final OperationFuture<MerchantCenterAccountLink, CreateMerchantCenterAccountLinkMetadata>
      createMerchantCenterAccountLinkAsync(
          CatalogName parent, MerchantCenterAccountLink merchantCenterAccountLink) {
    // Typed-resource-name overload: builds the request and delegates to the request-object
    // overload, which starts the long-running operation.
    CreateMerchantCenterAccountLinkRequest request =
        CreateMerchantCenterAccountLinkRequest.newBuilder()
            .setParent(parent == null ? null : parent.toString())
            .setMerchantCenterAccountLink(merchantCenterAccountLink)
            .build();
    return createMerchantCenterAccountLinkAsync(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   String parent = CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString();
   *   MerchantCenterAccountLink merchantCenterAccountLink =
   *       MerchantCenterAccountLink.newBuilder().build();
   *   MerchantCenterAccountLink response =
   *       merchantCenterAccountLinkServiceClient
   *           .createMerchantCenterAccountLinkAsync(parent, merchantCenterAccountLink)
   *           .get();
   * }
   * }</pre>
   *
   * @param parent Required. The branch resource where this MerchantCenterAccountLink will be
   *     created. Format: `projects/{PROJECT_NUMBER}/locations/global/catalogs/{CATALOG_ID}`
   * @param merchantCenterAccountLink Required. The
   *     [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] to
   *     create.
   *     <p>If the caller does not have permission to create the
   *     [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink],
   *     regardless of whether or not it exists, a PERMISSION_DENIED error is returned.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final OperationFuture<MerchantCenterAccountLink, CreateMerchantCenterAccountLinkMetadata>
      createMerchantCenterAccountLinkAsync(
          String parent, MerchantCenterAccountLink merchantCenterAccountLink) {
    // String overload: builds the request and delegates to the request-object overload.
    CreateMerchantCenterAccountLinkRequest request =
        CreateMerchantCenterAccountLinkRequest.newBuilder()
            .setParent(parent)
            .setMerchantCenterAccountLink(merchantCenterAccountLink)
            .build();
    return createMerchantCenterAccountLinkAsync(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   CreateMerchantCenterAccountLinkRequest request =
   *       CreateMerchantCenterAccountLinkRequest.newBuilder()
   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
   *           .setMerchantCenterAccountLink(MerchantCenterAccountLink.newBuilder().build())
   *           .build();
   *   MerchantCenterAccountLink response =
   *       merchantCenterAccountLinkServiceClient
   *           .createMerchantCenterAccountLinkAsync(request)
   *           .get();
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final OperationFuture<MerchantCenterAccountLink, CreateMerchantCenterAccountLinkMetadata>
      createMerchantCenterAccountLinkAsync(CreateMerchantCenterAccountLinkRequest request) {
    // Starts the long-running operation; the returned future resolves to the created link once
    // the operation completes.
    return createMerchantCenterAccountLinkOperationCallable().futureCall(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   CreateMerchantCenterAccountLinkRequest request =
   *       CreateMerchantCenterAccountLinkRequest.newBuilder()
   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
   *           .setMerchantCenterAccountLink(MerchantCenterAccountLink.newBuilder().build())
   *           .build();
   *   OperationFuture<MerchantCenterAccountLink, CreateMerchantCenterAccountLinkMetadata> future =
   *       merchantCenterAccountLinkServiceClient
   *           .createMerchantCenterAccountLinkOperationCallable()
   *           .futureCall(request);
   *   // Do something.
   *   MerchantCenterAccountLink response = future.get();
   * }
   * }</pre>
   */
  public final OperationCallable<
          CreateMerchantCenterAccountLinkRequest,
          MerchantCenterAccountLink,
          CreateMerchantCenterAccountLinkMetadata>
      createMerchantCenterAccountLinkOperationCallable() {
    return stub.createMerchantCenterAccountLinkOperationCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Creates a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   CreateMerchantCenterAccountLinkRequest request =
   *       CreateMerchantCenterAccountLinkRequest.newBuilder()
   *           .setParent(CatalogName.of("[PROJECT]", "[LOCATION]", "[CATALOG]").toString())
   *           .setMerchantCenterAccountLink(MerchantCenterAccountLink.newBuilder().build())
   *           .build();
   *   ApiFuture<Operation> future =
   *       merchantCenterAccountLinkServiceClient
   *           .createMerchantCenterAccountLinkCallable()
   *           .futureCall(request);
   *   // Do something.
   *   Operation response = future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<CreateMerchantCenterAccountLinkRequest, Operation>
      createMerchantCenterAccountLinkCallable() {
    return stub.createMerchantCenterAccountLinkCallable();
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   * If the [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] to
   * delete does not exist, a NOT_FOUND error is returned.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   MerchantCenterAccountLinkName name =
   *       MerchantCenterAccountLinkName.of(
   *           "[PROJECT]", "[LOCATION]", "[CATALOG]", "[MERCHANT_CENTER_ACCOUNT_LINK]");
   *   merchantCenterAccountLinkServiceClient.deleteMerchantCenterAccountLink(name);
   * }
   * }</pre>
   *
   * @param name Required. Full resource name. Format:
   *     `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id}`
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final void deleteMerchantCenterAccountLink(MerchantCenterAccountLinkName name) {
    // Typed-resource-name overload: converts the name to its string form and delegates to the
    // request-object overload.
    DeleteMerchantCenterAccountLinkRequest request =
        DeleteMerchantCenterAccountLinkRequest.newBuilder()
            .setName(name == null ? null : name.toString())
            .build();
    deleteMerchantCenterAccountLink(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   * If the [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] to
   * delete does not exist, a NOT_FOUND error is returned.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   String name =
   *       MerchantCenterAccountLinkName.of(
   *               "[PROJECT]", "[LOCATION]", "[CATALOG]", "[MERCHANT_CENTER_ACCOUNT_LINK]")
   *           .toString();
   *   merchantCenterAccountLinkServiceClient.deleteMerchantCenterAccountLink(name);
   * }
   * }</pre>
   *
   * @param name Required. Full resource name. Format:
   *     `projects/{project_number}/locations/{location_id}/catalogs/{catalog_id}/merchantCenterAccountLinks/{merchant_center_account_link_id}`
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final void deleteMerchantCenterAccountLink(String name) {
    // String overload: wraps the raw resource name and delegates to the request-object overload.
    DeleteMerchantCenterAccountLinkRequest request =
        DeleteMerchantCenterAccountLinkRequest.newBuilder().setName(name).build();
    deleteMerchantCenterAccountLink(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   * If the [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] to
   * delete does not exist, a NOT_FOUND error is returned.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   DeleteMerchantCenterAccountLinkRequest request =
   *       DeleteMerchantCenterAccountLinkRequest.newBuilder()
   *           .setName(
   *               MerchantCenterAccountLinkName.of(
   *                       "[PROJECT]", "[LOCATION]", "[CATALOG]", "[MERCHANT_CENTER_ACCOUNT_LINK]")
   *                   .toString())
   *           .build();
   *   merchantCenterAccountLinkServiceClient.deleteMerchantCenterAccountLink(request);
   * }
   * }</pre>
   *
   * @param request The request object containing all of the parameters for the API call.
   * @throws com.google.api.gax.rpc.ApiException if the remote call fails
   */
  public final void deleteMerchantCenterAccountLink(
      DeleteMerchantCenterAccountLinkRequest request) {
    // Blocking call: issues the RPC and returns when the server acknowledges the deletion.
    deleteMerchantCenterAccountLinkCallable().call(request);
  }
  // AUTO-GENERATED DOCUMENTATION AND METHOD.
  /**
   * Deletes a [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink].
   * If the [MerchantCenterAccountLink][google.cloud.retail.v2alpha.MerchantCenterAccountLink] to
   * delete does not exist, a NOT_FOUND error is returned.
   *
   * <p>Sample code:
   *
   * <pre>{@code
   * // This snippet has been automatically generated and should be regarded as a code template only.
   * // It will require modifications to work:
   * // - It may require correct/in-range values for request initialization.
   * // - It may require specifying regional endpoints when creating the service client as shown in
   * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
   * try (MerchantCenterAccountLinkServiceClient merchantCenterAccountLinkServiceClient =
   *     MerchantCenterAccountLinkServiceClient.create()) {
   *   DeleteMerchantCenterAccountLinkRequest request =
   *       DeleteMerchantCenterAccountLinkRequest.newBuilder()
   *           .setName(
   *               MerchantCenterAccountLinkName.of(
   *                       "[PROJECT]", "[LOCATION]", "[CATALOG]", "[MERCHANT_CENTER_ACCOUNT_LINK]")
   *                   .toString())
   *           .build();
   *   ApiFuture<Empty> future =
   *       merchantCenterAccountLinkServiceClient
   *           .deleteMerchantCenterAccountLinkCallable()
   *           .futureCall(request);
   *   // Do something.
   *   future.get();
   * }
   * }</pre>
   */
  public final UnaryCallable<DeleteMerchantCenterAccountLinkRequest, Empty>
      deleteMerchantCenterAccountLinkCallable() {
    return stub.deleteMerchantCenterAccountLinkCallable();
  }
  // BackgroundResource lifecycle: all shutdown/termination management is delegated to the stub,
  // which owns the underlying transport channels.
  @Override
  public final void close() {
    stub.close();
  }
  @Override
  public void shutdown() {
    stub.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }
  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
}
|
googleapis/google-cloud-java | 37,078 | java-talent/google-cloud-talent/src/main/java/com/google/cloud/talent/v4/stub/JobServiceStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.talent.v4.stub;
import static com.google.cloud.talent.v4.JobServiceClient.ListJobsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.talent.v4.BatchCreateJobsRequest;
import com.google.cloud.talent.v4.BatchCreateJobsResponse;
import com.google.cloud.talent.v4.BatchDeleteJobsRequest;
import com.google.cloud.talent.v4.BatchDeleteJobsResponse;
import com.google.cloud.talent.v4.BatchOperationMetadata;
import com.google.cloud.talent.v4.BatchUpdateJobsRequest;
import com.google.cloud.talent.v4.BatchUpdateJobsResponse;
import com.google.cloud.talent.v4.CreateJobRequest;
import com.google.cloud.talent.v4.DeleteJobRequest;
import com.google.cloud.talent.v4.GetJobRequest;
import com.google.cloud.talent.v4.Job;
import com.google.cloud.talent.v4.ListJobsRequest;
import com.google.cloud.talent.v4.ListJobsResponse;
import com.google.cloud.talent.v4.SearchJobsRequest;
import com.google.cloud.talent.v4.SearchJobsResponse;
import com.google.cloud.talent.v4.UpdateJobRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link JobServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (jobs.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of createJob:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* JobServiceStubSettings.Builder jobServiceSettingsBuilder = JobServiceStubSettings.newBuilder();
* jobServiceSettingsBuilder
* .createJobSettings()
* .setRetrySettings(
* jobServiceSettingsBuilder
* .createJobSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* JobServiceStubSettings jobServiceSettings = jobServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for batchCreateJobs:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* JobServiceStubSettings.Builder jobServiceSettingsBuilder = JobServiceStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
* OperationalTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* jobServiceSettingsBuilder
* .createClusterOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class JobServiceStubSettings extends StubSettings<JobServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/cloud-platform")
          .add("https://www.googleapis.com/auth/jobs")
          .build();
  // Immutable per-RPC call settings, frozen from the Builder in the constructor below.
  private final UnaryCallSettings<CreateJobRequest, Job> createJobSettings;
  private final UnaryCallSettings<BatchCreateJobsRequest, Operation> batchCreateJobsSettings;
  private final OperationCallSettings<
          BatchCreateJobsRequest, BatchCreateJobsResponse, BatchOperationMetadata>
      batchCreateJobsOperationSettings;
  private final UnaryCallSettings<GetJobRequest, Job> getJobSettings;
  private final UnaryCallSettings<UpdateJobRequest, Job> updateJobSettings;
  private final UnaryCallSettings<BatchUpdateJobsRequest, Operation> batchUpdateJobsSettings;
  private final OperationCallSettings<
          BatchUpdateJobsRequest, BatchUpdateJobsResponse, BatchOperationMetadata>
      batchUpdateJobsOperationSettings;
  private final UnaryCallSettings<DeleteJobRequest, Empty> deleteJobSettings;
  private final UnaryCallSettings<BatchDeleteJobsRequest, Operation> batchDeleteJobsSettings;
  private final OperationCallSettings<
          BatchDeleteJobsRequest, BatchDeleteJobsResponse, BatchOperationMetadata>
      batchDeleteJobsOperationSettings;
  private final PagedCallSettings<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
      listJobsSettings;
  private final UnaryCallSettings<SearchJobsRequest, SearchJobsResponse> searchJobsSettings;
  private final UnaryCallSettings<SearchJobsRequest, SearchJobsResponse> searchJobsForAlertSettings;
  // Describes how ListJobs pagination works: how page tokens/sizes are injected into
  // requests and how the next token and page resources are extracted from responses.
  private static final PagedListDescriptor<ListJobsRequest, ListJobsResponse, Job>
      LIST_JOBS_PAGE_STR_DESC =
          new PagedListDescriptor<ListJobsRequest, ListJobsResponse, Job>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListJobsRequest injectToken(ListJobsRequest payload, String token) {
              return ListJobsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListJobsRequest injectPageSize(ListJobsRequest payload, int pageSize) {
              return ListJobsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListJobsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListJobsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Job> extractResources(ListJobsResponse payload) {
              return payload.getJobsList();
            }
          };
  // Wraps a raw ListJobs future into a ListJobsPagedResponse using the descriptor above.
  private static final PagedListResponseFactory<
          ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
      LIST_JOBS_PAGE_STR_FACT =
          new PagedListResponseFactory<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>() {
            @Override
            public ApiFuture<ListJobsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListJobsRequest, ListJobsResponse> callable,
                ListJobsRequest request,
                ApiCallContext context,
                ApiFuture<ListJobsResponse> futureResponse) {
              PageContext<ListJobsRequest, ListJobsResponse, Job> pageContext =
                  PageContext.create(callable, LIST_JOBS_PAGE_STR_DESC, request, context);
              return ListJobsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to createJob. */
  public UnaryCallSettings<CreateJobRequest, Job> createJobSettings() {
    return createJobSettings;
  }

  /** Returns the object with the settings used for calls to batchCreateJobs. */
  public UnaryCallSettings<BatchCreateJobsRequest, Operation> batchCreateJobsSettings() {
    return batchCreateJobsSettings;
  }

  /** Returns the object with the settings used for calls to batchCreateJobs. */
  public OperationCallSettings<
          BatchCreateJobsRequest, BatchCreateJobsResponse, BatchOperationMetadata>
      batchCreateJobsOperationSettings() {
    return batchCreateJobsOperationSettings;
  }

  /** Returns the object with the settings used for calls to getJob. */
  public UnaryCallSettings<GetJobRequest, Job> getJobSettings() {
    return getJobSettings;
  }

  /** Returns the object with the settings used for calls to updateJob. */
  public UnaryCallSettings<UpdateJobRequest, Job> updateJobSettings() {
    return updateJobSettings;
  }

  /** Returns the object with the settings used for calls to batchUpdateJobs. */
  public UnaryCallSettings<BatchUpdateJobsRequest, Operation> batchUpdateJobsSettings() {
    return batchUpdateJobsSettings;
  }

  /** Returns the object with the settings used for calls to batchUpdateJobs. */
  public OperationCallSettings<
          BatchUpdateJobsRequest, BatchUpdateJobsResponse, BatchOperationMetadata>
      batchUpdateJobsOperationSettings() {
    return batchUpdateJobsOperationSettings;
  }

  /** Returns the object with the settings used for calls to deleteJob. */
  public UnaryCallSettings<DeleteJobRequest, Empty> deleteJobSettings() {
    return deleteJobSettings;
  }

  /** Returns the object with the settings used for calls to batchDeleteJobs. */
  public UnaryCallSettings<BatchDeleteJobsRequest, Operation> batchDeleteJobsSettings() {
    return batchDeleteJobsSettings;
  }

  /** Returns the object with the settings used for calls to batchDeleteJobs. */
  public OperationCallSettings<
          BatchDeleteJobsRequest, BatchDeleteJobsResponse, BatchOperationMetadata>
      batchDeleteJobsOperationSettings() {
    return batchDeleteJobsOperationSettings;
  }

  /** Returns the object with the settings used for calls to listJobs. */
  public PagedCallSettings<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
      listJobsSettings() {
    return listJobsSettings;
  }

  /** Returns the object with the settings used for calls to searchJobs. */
  public UnaryCallSettings<SearchJobsRequest, SearchJobsResponse> searchJobsSettings() {
    return searchJobsSettings;
  }

  /** Returns the object with the settings used for calls to searchJobsForAlert. */
  public UnaryCallSettings<SearchJobsRequest, SearchJobsResponse> searchJobsForAlertSettings() {
    return searchJobsForAlertSettings;
  }

  /**
   * Creates the transport-specific stub (gRPC or HTTP/JSON) matching the configured transport
   * channel provider.
   *
   * @throws UnsupportedOperationException if the configured transport is neither gRPC nor HTTP/JSON
   */
  public JobServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcJobServiceStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonJobServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "jobs";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "jobs.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "jobs.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default transport channel provider; gRPC is the default transport. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  /** Returns a builder for the gRPC API client header provider (x-goog-api-client tokens). */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(JobServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a builder for the HTTP/JSON API client header provider. */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(JobServiceStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns the default API client header provider; delegates to the gRPC variant. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return JobServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Builds the immutable settings object by freezing every per-RPC builder. */
  protected JobServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    createJobSettings = settingsBuilder.createJobSettings().build();
    batchCreateJobsSettings = settingsBuilder.batchCreateJobsSettings().build();
    batchCreateJobsOperationSettings = settingsBuilder.batchCreateJobsOperationSettings().build();
    getJobSettings = settingsBuilder.getJobSettings().build();
    updateJobSettings = settingsBuilder.updateJobSettings().build();
    batchUpdateJobsSettings = settingsBuilder.batchUpdateJobsSettings().build();
    batchUpdateJobsOperationSettings = settingsBuilder.batchUpdateJobsOperationSettings().build();
    deleteJobSettings = settingsBuilder.deleteJobSettings().build();
    batchDeleteJobsSettings = settingsBuilder.batchDeleteJobsSettings().build();
    batchDeleteJobsOperationSettings = settingsBuilder.batchDeleteJobsOperationSettings().build();
    listJobsSettings = settingsBuilder.listJobsSettings().build();
    searchJobsSettings = settingsBuilder.searchJobsSettings().build();
    searchJobsForAlertSettings = settingsBuilder.searchJobsForAlertSettings().build();
  }

  /** Builder for JobServiceStubSettings. */
  public static class Builder extends StubSettings.Builder<JobServiceStubSettings, Builder> {
    // All unary call builders, so applyToAllUnaryMethods can update them in one pass.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<CreateJobRequest, Job> createJobSettings;
    private final UnaryCallSettings.Builder<BatchCreateJobsRequest, Operation>
        batchCreateJobsSettings;
    private final OperationCallSettings.Builder<
            BatchCreateJobsRequest, BatchCreateJobsResponse, BatchOperationMetadata>
        batchCreateJobsOperationSettings;
    private final UnaryCallSettings.Builder<GetJobRequest, Job> getJobSettings;
    private final UnaryCallSettings.Builder<UpdateJobRequest, Job> updateJobSettings;
    private final UnaryCallSettings.Builder<BatchUpdateJobsRequest, Operation>
        batchUpdateJobsSettings;
    private final OperationCallSettings.Builder<
            BatchUpdateJobsRequest, BatchUpdateJobsResponse, BatchOperationMetadata>
        batchUpdateJobsOperationSettings;
    private final UnaryCallSettings.Builder<DeleteJobRequest, Empty> deleteJobSettings;
    private final UnaryCallSettings.Builder<BatchDeleteJobsRequest, Operation>
        batchDeleteJobsSettings;
    private final OperationCallSettings.Builder<
            BatchDeleteJobsRequest, BatchDeleteJobsResponse, BatchOperationMetadata>
        batchDeleteJobsOperationSettings;
    private final PagedCallSettings.Builder<
            ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
        listJobsSettings;
    private final UnaryCallSettings.Builder<SearchJobsRequest, SearchJobsResponse>
        searchJobsSettings;
    private final UnaryCallSettings.Builder<SearchJobsRequest, SearchJobsResponse>
        searchJobsForAlertSettings;
    // Named sets of gRPC status codes that may be retried, keyed by policy name.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "no_retry_3_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      definitions.put(
          "retry_policy_2_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }
    // Named retry/timeout parameter bundles, keyed by policy name; paired with the
    // code sets above when wiring defaults in initDefaults.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setTotalTimeoutDuration(Duration.ofMillis(30000L))
              .build();
      definitions.put("no_retry_3_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L))
              .setTotalTimeoutDuration(Duration.ofMillis(30000L))
              .build();
      definitions.put("retry_policy_2_params", settings);
      settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
      definitions.put("no_retry_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    /** Creates fresh per-RPC builders and applies the generated defaults. */
    protected Builder(ClientContext clientContext) {
      super(clientContext);
      createJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchCreateJobsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchCreateJobsOperationSettings = OperationCallSettings.newBuilder();
      getJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchUpdateJobsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchUpdateJobsOperationSettings = OperationCallSettings.newBuilder();
      deleteJobSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchDeleteJobsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      batchDeleteJobsOperationSettings = OperationCallSettings.newBuilder();
      listJobsSettings = PagedCallSettings.newBuilder(LIST_JOBS_PAGE_STR_FACT);
      searchJobsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      searchJobsForAlertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createJobSettings,
              batchCreateJobsSettings,
              getJobSettings,
              updateJobSettings,
              batchUpdateJobsSettings,
              deleteJobSettings,
              batchDeleteJobsSettings,
              listJobsSettings,
              searchJobsSettings,
              searchJobsForAlertSettings);
      initDefaults(this);
    }

    /** Copies every per-RPC setting from an existing settings instance back into builders. */
    protected Builder(JobServiceStubSettings settings) {
      super(settings);
      createJobSettings = settings.createJobSettings.toBuilder();
      batchCreateJobsSettings = settings.batchCreateJobsSettings.toBuilder();
      batchCreateJobsOperationSettings = settings.batchCreateJobsOperationSettings.toBuilder();
      getJobSettings = settings.getJobSettings.toBuilder();
      updateJobSettings = settings.updateJobSettings.toBuilder();
      batchUpdateJobsSettings = settings.batchUpdateJobsSettings.toBuilder();
      batchUpdateJobsOperationSettings = settings.batchUpdateJobsOperationSettings.toBuilder();
      deleteJobSettings = settings.deleteJobSettings.toBuilder();
      batchDeleteJobsSettings = settings.batchDeleteJobsSettings.toBuilder();
      batchDeleteJobsOperationSettings = settings.batchDeleteJobsOperationSettings.toBuilder();
      listJobsSettings = settings.listJobsSettings.toBuilder();
      searchJobsSettings = settings.searchJobsSettings.toBuilder();
      searchJobsForAlertSettings = settings.searchJobsForAlertSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createJobSettings,
              batchCreateJobsSettings,
              getJobSettings,
              updateJobSettings,
              batchUpdateJobsSettings,
              deleteJobSettings,
              batchDeleteJobsSettings,
              listJobsSettings,
              searchJobsSettings,
              searchJobsForAlertSettings);
    }

    /** Default builder wired for gRPC transport. */
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    /** Default builder wired for HTTP/JSON (REST) transport. */
    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    /**
     * Applies the service-config retry policies to every RPC builder and configures the polling
     * algorithms for the three long-running batch operations.
     */
    private static Builder initDefaults(Builder builder) {
      builder
          .createJobSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"));
      builder
          .batchCreateJobsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"));
      builder
          .getJobSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"));
      builder
          .updateJobSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"));
      builder
          .batchUpdateJobsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"));
      builder
          .deleteJobSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"));
      builder
          .batchDeleteJobsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"));
      builder
          .listJobsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_2_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_2_params"));
      builder
          .searchJobsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"));
      builder
          .searchJobsForAlertSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .batchCreateJobsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<BatchCreateJobsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(BatchCreateJobsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(BatchOperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));
      builder
          .batchUpdateJobsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<BatchUpdateJobsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(BatchUpdateJobsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(BatchOperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));
      builder
          .batchDeleteJobsOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<BatchDeleteJobsRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_3_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_3_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(BatchDeleteJobsResponse.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(BatchOperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));
      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the mutable builders for all unary RPCs of this service. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to createJob. */
    public UnaryCallSettings.Builder<CreateJobRequest, Job> createJobSettings() {
      return createJobSettings;
    }

    /** Returns the builder for the settings used for calls to batchCreateJobs. */
    public UnaryCallSettings.Builder<BatchCreateJobsRequest, Operation> batchCreateJobsSettings() {
      return batchCreateJobsSettings;
    }

    /** Returns the builder for the settings used for calls to batchCreateJobs. */
    public OperationCallSettings.Builder<
            BatchCreateJobsRequest, BatchCreateJobsResponse, BatchOperationMetadata>
        batchCreateJobsOperationSettings() {
      return batchCreateJobsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to getJob. */
    public UnaryCallSettings.Builder<GetJobRequest, Job> getJobSettings() {
      return getJobSettings;
    }

    /** Returns the builder for the settings used for calls to updateJob. */
    public UnaryCallSettings.Builder<UpdateJobRequest, Job> updateJobSettings() {
      return updateJobSettings;
    }

    /** Returns the builder for the settings used for calls to batchUpdateJobs. */
    public UnaryCallSettings.Builder<BatchUpdateJobsRequest, Operation> batchUpdateJobsSettings() {
      return batchUpdateJobsSettings;
    }

    /** Returns the builder for the settings used for calls to batchUpdateJobs. */
    public OperationCallSettings.Builder<
            BatchUpdateJobsRequest, BatchUpdateJobsResponse, BatchOperationMetadata>
        batchUpdateJobsOperationSettings() {
      return batchUpdateJobsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to deleteJob. */
    public UnaryCallSettings.Builder<DeleteJobRequest, Empty> deleteJobSettings() {
      return deleteJobSettings;
    }

    /** Returns the builder for the settings used for calls to batchDeleteJobs. */
    public UnaryCallSettings.Builder<BatchDeleteJobsRequest, Operation> batchDeleteJobsSettings() {
      return batchDeleteJobsSettings;
    }

    /** Returns the builder for the settings used for calls to batchDeleteJobs. */
    public OperationCallSettings.Builder<
            BatchDeleteJobsRequest, BatchDeleteJobsResponse, BatchOperationMetadata>
        batchDeleteJobsOperationSettings() {
      return batchDeleteJobsOperationSettings;
    }

    /** Returns the builder for the settings used for calls to listJobs. */
    public PagedCallSettings.Builder<ListJobsRequest, ListJobsResponse, ListJobsPagedResponse>
        listJobsSettings() {
      return listJobsSettings;
    }

    /** Returns the builder for the settings used for calls to searchJobs. */
    public UnaryCallSettings.Builder<SearchJobsRequest, SearchJobsResponse> searchJobsSettings() {
      return searchJobsSettings;
    }

    /** Returns the builder for the settings used for calls to searchJobsForAlert. */
    public UnaryCallSettings.Builder<SearchJobsRequest, SearchJobsResponse>
        searchJobsForAlertSettings() {
      return searchJobsForAlertSettings;
    }

    @Override
    public JobServiceStubSettings build() throws IOException {
      return new JobServiceStubSettings(this);
    }
  }
}
|
googleapis/google-cloud-java | 36,969 | java-discoveryengine/grpc-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/EngineServiceGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.discoveryengine.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for managing [Engine][google.cloud.discoveryengine.v1.Engine]
* configuration.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/discoveryengine/v1/engine_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class EngineServiceGrpc {
  // Static holder for service/method descriptors and stubs; never instantiated.
  private EngineServiceGrpc() {}

  /** Fully-qualified proto service name used to build full method names. */
  public static final java.lang.String SERVICE_NAME =
      "google.cloud.discoveryengine.v1.EngineService";
// Static method descriptors that strictly reflect the proto.
  // Lazily initialized by getCreateEngineMethod() via double-checked locking.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.CreateEngineRequest, com.google.longrunning.Operation>
      getCreateEngineMethod;

  /** Returns the cached {@code CreateEngine} method descriptor, building it on first use. */
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateEngine",
      requestType = com.google.cloud.discoveryengine.v1.CreateEngineRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.CreateEngineRequest, com.google.longrunning.Operation>
      getCreateEngineMethod() {
    // Local copy avoids re-reading the volatile field on the common (already-built) path.
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.CreateEngineRequest,
            com.google.longrunning.Operation>
        getCreateEngineMethod;
    if ((getCreateEngineMethod = EngineServiceGrpc.getCreateEngineMethod) == null) {
      synchronized (EngineServiceGrpc.class) {
        // Re-check under the lock: another thread may have built it first.
        if ((getCreateEngineMethod = EngineServiceGrpc.getCreateEngineMethod) == null) {
          EngineServiceGrpc.getCreateEngineMethod =
              getCreateEngineMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.CreateEngineRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateEngine"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.CreateEngineRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new EngineServiceMethodDescriptorSupplier("CreateEngine"))
                      .build();
        }
      }
    }
    return getCreateEngineMethod;
  }
  // Lazily initialized by getDeleteEngineMethod() via double-checked locking.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.DeleteEngineRequest, com.google.longrunning.Operation>
      getDeleteEngineMethod;

  /** Returns the cached {@code DeleteEngine} method descriptor, building it on first use. */
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteEngine",
      requestType = com.google.cloud.discoveryengine.v1.DeleteEngineRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.DeleteEngineRequest, com.google.longrunning.Operation>
      getDeleteEngineMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.DeleteEngineRequest,
            com.google.longrunning.Operation>
        getDeleteEngineMethod;
    if ((getDeleteEngineMethod = EngineServiceGrpc.getDeleteEngineMethod) == null) {
      synchronized (EngineServiceGrpc.class) {
        // Re-check under the lock: another thread may have built it first.
        if ((getDeleteEngineMethod = EngineServiceGrpc.getDeleteEngineMethod) == null) {
          EngineServiceGrpc.getDeleteEngineMethod =
              getDeleteEngineMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.DeleteEngineRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteEngine"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.DeleteEngineRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new EngineServiceMethodDescriptorSupplier("DeleteEngine"))
                      .build();
        }
      }
    }
    return getDeleteEngineMethod;
  }
  // Lazily initialized by getUpdateEngineMethod() via double-checked locking.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.UpdateEngineRequest,
          com.google.cloud.discoveryengine.v1.Engine>
      getUpdateEngineMethod;

  /** Returns the cached {@code UpdateEngine} method descriptor, building it on first use. */
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdateEngine",
      requestType = com.google.cloud.discoveryengine.v1.UpdateEngineRequest.class,
      responseType = com.google.cloud.discoveryengine.v1.Engine.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.UpdateEngineRequest,
          com.google.cloud.discoveryengine.v1.Engine>
      getUpdateEngineMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.UpdateEngineRequest,
            com.google.cloud.discoveryengine.v1.Engine>
        getUpdateEngineMethod;
    if ((getUpdateEngineMethod = EngineServiceGrpc.getUpdateEngineMethod) == null) {
      synchronized (EngineServiceGrpc.class) {
        // Re-check under the lock: another thread may have built it first.
        if ((getUpdateEngineMethod = EngineServiceGrpc.getUpdateEngineMethod) == null) {
          EngineServiceGrpc.getUpdateEngineMethod =
              getUpdateEngineMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.UpdateEngineRequest,
                          com.google.cloud.discoveryengine.v1.Engine>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateEngine"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.UpdateEngineRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.Engine.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new EngineServiceMethodDescriptorSupplier("UpdateEngine"))
                      .build();
        }
      }
    }
    return getUpdateEngineMethod;
  }
  // Lazily initialized by getGetEngineMethod() via double-checked locking.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.GetEngineRequest,
          com.google.cloud.discoveryengine.v1.Engine>
      getGetEngineMethod;

  /** Returns the cached {@code GetEngine} method descriptor, building it on first use. */
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetEngine",
      requestType = com.google.cloud.discoveryengine.v1.GetEngineRequest.class,
      responseType = com.google.cloud.discoveryengine.v1.Engine.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.GetEngineRequest,
          com.google.cloud.discoveryengine.v1.Engine>
      getGetEngineMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.GetEngineRequest,
            com.google.cloud.discoveryengine.v1.Engine>
        getGetEngineMethod;
    if ((getGetEngineMethod = EngineServiceGrpc.getGetEngineMethod) == null) {
      synchronized (EngineServiceGrpc.class) {
        // Re-check under the lock: another thread may have built it first.
        if ((getGetEngineMethod = EngineServiceGrpc.getGetEngineMethod) == null) {
          EngineServiceGrpc.getGetEngineMethod =
              getGetEngineMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.GetEngineRequest,
                          com.google.cloud.discoveryengine.v1.Engine>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetEngine"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.GetEngineRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.Engine.getDefaultInstance()))
                      .setSchemaDescriptor(new EngineServiceMethodDescriptorSupplier("GetEngine"))
                      .build();
        }
      }
    }
    return getGetEngineMethod;
  }
  // Lazily-built descriptor for the unary ListEngines RPC; volatile for safe
  // publication by the double-checked locking getter below.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.ListEnginesRequest,
          com.google.cloud.discoveryengine.v1.ListEnginesResponse>
      getListEnginesMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListEngines",
      requestType = com.google.cloud.discoveryengine.v1.ListEnginesRequest.class,
      responseType = com.google.cloud.discoveryengine.v1.ListEnginesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1.ListEnginesRequest,
          com.google.cloud.discoveryengine.v1.ListEnginesResponse>
      getListEnginesMethod() {
    // Double-checked locking; descriptor is constructed at most once.
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1.ListEnginesRequest,
            com.google.cloud.discoveryengine.v1.ListEnginesResponse>
        getListEnginesMethod;
    if ((getListEnginesMethod = EngineServiceGrpc.getListEnginesMethod) == null) {
      synchronized (EngineServiceGrpc.class) {
        if ((getListEnginesMethod = EngineServiceGrpc.getListEnginesMethod) == null) {
          EngineServiceGrpc.getListEnginesMethod =
              getListEnginesMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1.ListEnginesRequest,
                          com.google.cloud.discoveryengine.v1.ListEnginesResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListEngines"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.ListEnginesRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1.ListEnginesResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(new EngineServiceMethodDescriptorSupplier("ListEngines"))
                      .build();
        }
      }
    }
    return getListEnginesMethod;
  }
/** Creates a new async stub that supports all call types for the service */
public static EngineServiceStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<EngineServiceStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<EngineServiceStub>() {
@java.lang.Override
public EngineServiceStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new EngineServiceStub(channel, callOptions);
}
};
return EngineServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static EngineServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<EngineServiceBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<EngineServiceBlockingV2Stub>() {
@java.lang.Override
public EngineServiceBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new EngineServiceBlockingV2Stub(channel, callOptions);
}
};
return EngineServiceBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static EngineServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<EngineServiceBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<EngineServiceBlockingStub>() {
@java.lang.Override
public EngineServiceBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new EngineServiceBlockingStub(channel, callOptions);
}
};
return EngineServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static EngineServiceFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<EngineServiceFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<EngineServiceFutureStub>() {
@java.lang.Override
public EngineServiceFutureStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new EngineServiceFutureStub(channel, callOptions);
}
};
return EngineServiceFutureStub.newStub(factory, channel);
}
  /**
   *
   *
   * <pre>
   * Service for managing [Engine][google.cloud.discoveryengine.v1.Engine]
   * configuration.
   * </pre>
   */
  public interface AsyncService {
    // Every default implementation below rejects the call with UNIMPLEMENTED,
    // so server authors only need to override the RPCs they actually support.
    /**
     *
     *
     * <pre>
     * Creates a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    default void createEngine(
        com.google.cloud.discoveryengine.v1.CreateEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateEngineMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    default void deleteEngine(
        com.google.cloud.discoveryengine.v1.DeleteEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteEngineMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Updates an [Engine][google.cloud.discoveryengine.v1.Engine]
     * </pre>
     */
    default void updateEngine(
        com.google.cloud.discoveryengine.v1.UpdateEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.Engine> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateEngineMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Gets a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    default void getEngine(
        com.google.cloud.discoveryengine.v1.GetEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.Engine> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetEngineMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s associated
     * with the project.
     * </pre>
     */
    default void listEngines(
        com.google.cloud.discoveryengine.v1.ListEnginesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.ListEnginesResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListEnginesMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service EngineService.
   *
   * <pre>
   * Service for managing [Engine][google.cloud.discoveryengine.v1.Engine]
   * configuration.
   * </pre>
   */
  public abstract static class EngineServiceImplBase
      implements io.grpc.BindableService, AsyncService {
    // Delegates to the static bindService(AsyncService) so the wiring of
    // method handlers lives in one place.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return EngineServiceGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service EngineService.
   *
   * <pre>
   * Service for managing [Engine][google.cloud.discoveryengine.v1.Engine]
   * configuration.
   * </pre>
   */
  public static final class EngineServiceStub
      extends io.grpc.stub.AbstractAsyncStub<EngineServiceStub> {
    // Each RPC method below is non-blocking: the response (or error) is
    // delivered to the supplied StreamObserver.
    private EngineServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected EngineServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new EngineServiceStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public void createEngine(
        com.google.cloud.discoveryengine.v1.CreateEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateEngineMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public void deleteEngine(
        com.google.cloud.discoveryengine.v1.DeleteEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteEngineMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Updates an [Engine][google.cloud.discoveryengine.v1.Engine]
     * </pre>
     */
    public void updateEngine(
        com.google.cloud.discoveryengine.v1.UpdateEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.Engine> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateEngineMethod(), getCallOptions()),
          request,
          responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Gets a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public void getEngine(
        com.google.cloud.discoveryengine.v1.GetEngineRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.Engine> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetEngineMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s associated
     * with the project.
     * </pre>
     */
    public void listEngines(
        com.google.cloud.discoveryengine.v1.ListEnginesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.ListEnginesResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListEnginesMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service EngineService.
   *
   * <pre>
   * Service for managing [Engine][google.cloud.discoveryengine.v1.Engine]
   * configuration.
   * </pre>
   */
  public static final class EngineServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<EngineServiceBlockingV2Stub> {
    // Each RPC method below blocks the calling thread until the server
    // responds (or the call fails).
    private EngineServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected EngineServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new EngineServiceBlockingV2Stub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.longrunning.Operation createEngine(
        com.google.cloud.discoveryengine.v1.CreateEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.longrunning.Operation deleteEngine(
        com.google.cloud.discoveryengine.v1.DeleteEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates an [Engine][google.cloud.discoveryengine.v1.Engine]
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.Engine updateEngine(
        com.google.cloud.discoveryengine.v1.UpdateEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Gets a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.Engine getEngine(
        com.google.cloud.discoveryengine.v1.GetEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s associated
     * with the project.
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.ListEnginesResponse listEngines(
        com.google.cloud.discoveryengine.v1.ListEnginesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListEnginesMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service EngineService.
   *
   * <pre>
   * Service for managing [Engine][google.cloud.discoveryengine.v1.Engine]
   * configuration.
   * </pre>
   */
  public static final class EngineServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<EngineServiceBlockingStub> {
    // Legacy blocking stub; every method here blocks until the unary call
    // completes.
    private EngineServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected EngineServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new EngineServiceBlockingStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.longrunning.Operation createEngine(
        com.google.cloud.discoveryengine.v1.CreateEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.longrunning.Operation deleteEngine(
        com.google.cloud.discoveryengine.v1.DeleteEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates an [Engine][google.cloud.discoveryengine.v1.Engine]
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.Engine updateEngine(
        com.google.cloud.discoveryengine.v1.UpdateEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Gets a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.Engine getEngine(
        com.google.cloud.discoveryengine.v1.GetEngineRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetEngineMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s associated
     * with the project.
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1.ListEnginesResponse listEngines(
        com.google.cloud.discoveryengine.v1.ListEnginesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListEnginesMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service EngineService.
   *
   * <pre>
   * Service for managing [Engine][google.cloud.discoveryengine.v1.Engine]
   * configuration.
   * </pre>
   */
  public static final class EngineServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<EngineServiceFutureStub> {
    // Each RPC method below returns immediately with a ListenableFuture that
    // completes when the unary call does.
    private EngineServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected EngineServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new EngineServiceFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Creates a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        createEngine(com.google.cloud.discoveryengine.v1.CreateEngineRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateEngineMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        deleteEngine(com.google.cloud.discoveryengine.v1.DeleteEngineRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteEngineMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Updates an [Engine][google.cloud.discoveryengine.v1.Engine]
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.discoveryengine.v1.Engine>
        updateEngine(com.google.cloud.discoveryengine.v1.UpdateEngineRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateEngineMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Gets a [Engine][google.cloud.discoveryengine.v1.Engine].
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.discoveryengine.v1.Engine>
        getEngine(com.google.cloud.discoveryengine.v1.GetEngineRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetEngineMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Lists all the [Engine][google.cloud.discoveryengine.v1.Engine]s associated
     * with the project.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.discoveryengine.v1.ListEnginesResponse>
        listEngines(com.google.cloud.discoveryengine.v1.ListEnginesRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListEnginesMethod(), getCallOptions()), request);
    }
  }
  // Dispatch indices for MethodHandlers; the values must stay in sync with the
  // handlers registered in bindService below.
  private static final int METHODID_CREATE_ENGINE = 0;
  private static final int METHODID_DELETE_ENGINE = 1;
  private static final int METHODID_UPDATE_ENGINE = 2;
  private static final int METHODID_GET_ENGINE = 3;
  private static final int METHODID_LIST_ENGINES = 4;
  // One handler instance is created per RPC method; methodId selects which
  // AsyncService method the incoming request is routed to.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Casts are safe: bindService pairs each methodId with matching
      // request/response types.
      switch (methodId) {
        case METHODID_CREATE_ENGINE:
          serviceImpl.createEngine(
              (com.google.cloud.discoveryengine.v1.CreateEngineRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_DELETE_ENGINE:
          serviceImpl.deleteEngine(
              (com.google.cloud.discoveryengine.v1.DeleteEngineRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_UPDATE_ENGINE:
          serviceImpl.updateEngine(
              (com.google.cloud.discoveryengine.v1.UpdateEngineRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.Engine>)
                  responseObserver);
          break;
        case METHODID_GET_ENGINE:
          serviceImpl.getEngine(
              (com.google.cloud.discoveryengine.v1.GetEngineRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.Engine>)
                  responseObserver);
          break;
        case METHODID_LIST_ENGINES:
          serviceImpl.listEngines(
              (com.google.cloud.discoveryengine.v1.ListEnginesRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1.ListEnginesResponse>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // All EngineService RPCs are unary, so the streaming overload is never
      // dispatched.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  // Builds the server-side service definition, pairing each method descriptor
  // with a MethodHandlers instance keyed by the corresponding METHODID_* value.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getCreateEngineMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.CreateEngineRequest,
                    com.google.longrunning.Operation>(service, METHODID_CREATE_ENGINE)))
        .addMethod(
            getDeleteEngineMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.DeleteEngineRequest,
                    com.google.longrunning.Operation>(service, METHODID_DELETE_ENGINE)))
        .addMethod(
            getUpdateEngineMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.UpdateEngineRequest,
                    com.google.cloud.discoveryengine.v1.Engine>(service, METHODID_UPDATE_ENGINE)))
        .addMethod(
            getGetEngineMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.GetEngineRequest,
                    com.google.cloud.discoveryengine.v1.Engine>(service, METHODID_GET_ENGINE)))
        .addMethod(
            getListEnginesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1.ListEnginesRequest,
                    com.google.cloud.discoveryengine.v1.ListEnginesResponse>(
                    service, METHODID_LIST_ENGINES)))
        .build();
  }
  // Bridges the protobuf descriptors into gRPC reflection: the base supplier
  // exposes the file/service descriptors, the method supplier narrows to one
  // named method.
  private abstract static class EngineServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    EngineServiceBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.discoveryengine.v1.EngineServiceProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("EngineService");
    }
  }
  private static final class EngineServiceFileDescriptorSupplier
      extends EngineServiceBaseDescriptorSupplier {
    EngineServiceFileDescriptorSupplier() {}
  }
  private static final class EngineServiceMethodDescriptorSupplier
      extends EngineServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    // Name of the RPC method (e.g. "GetEngine") this supplier describes.
    private final java.lang.String methodName;
    EngineServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-built aggregate descriptor for the whole service; volatile for the
  // double-checked locking below.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    // Double-checked locking: fast path reads the volatile once; the builder
    // runs at most once per class loader.
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (EngineServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new EngineServiceFileDescriptorSupplier())
                      .addMethod(getCreateEngineMethod())
                      .addMethod(getDeleteEngineMethod())
                      .addMethod(getUpdateEngineMethod())
                      .addMethod(getGetEngineMethod())
                      .addMethod(getListEnginesMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
apache/hive | 36,496 | jdbc/src/java/org/apache/hive/jdbc/HiveCallableStatement.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.jdbc;
import java.io.InputStream;
import java.io.Reader;
import java.math.BigDecimal;
import java.net.URL;
import java.sql.Array;
import java.sql.Blob;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.Date;
import java.sql.NClob;
import java.sql.ParameterMetaData;
import java.sql.Ref;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.RowId;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Map;
/**
* The Statement used to execute SQL stored procedures. The JDBC API provides a
* stored procedure SQL escape syntax that allows stored procedures to be called
* in a standard way for all RDBMSs. Hive does not support SQL stored
* procedures.
*/
public class HiveCallableStatement implements java.sql.CallableStatement {
  // Connection this statement was created from; kept so getConnection() can
  // return it. NOTE(review): not null-checked here — a null connection only
  // surfaces later at the call site; confirm whether fail-fast is desired.
  private final Connection connection;
  /**
   * Constructor.
   *
   * @param connection the connection
   */
  public HiveCallableStatement(Connection connection) {
    this.connection = connection;
  }
  // Hive has no stored-procedure support, so every CallableStatement OUT
  // parameter accessor in this class fails with SQLFeatureNotSupportedException,
  // as the JDBC specification permits for unsupported features.
  @Override
  public Array getArray(int i) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Array getArray(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public BigDecimal getBigDecimal(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public BigDecimal getBigDecimal(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public BigDecimal getBigDecimal(int parameterIndex, int scale) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Blob getBlob(int i) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Blob getBlob(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public boolean getBoolean(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public boolean getBoolean(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public byte getByte(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public byte getByte(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public byte[] getBytes(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public byte[] getBytes(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  // Unsupported OUT-parameter accessors (streams, CLOBs and dates) — see the
  // class Javadoc: Hive does not support SQL stored procedures.
  @Override
  public Reader getCharacterStream(int arg0) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Reader getCharacterStream(String arg0) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Clob getClob(int i) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Clob getClob(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Date getDate(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Date getDate(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Date getDate(int parameterIndex, Calendar cal) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Date getDate(String parameterName, Calendar cal) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  // Unsupported numeric OUT-parameter accessors.
  @Override
  public double getDouble(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public double getDouble(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public float getFloat(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public float getFloat(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public int getInt(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public int getInt(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public long getLong(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public long getLong(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  // Unsupported national-character (N*) OUT-parameter accessors.
  @Override
  public Reader getNCharacterStream(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Reader getNCharacterStream(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public NClob getNClob(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public NClob getNClob(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public String getNString(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public String getNString(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  // Unsupported generic getObject accessors (all overloads, including the
  // typed and type-map variants).
  @Override
  public Object getObject(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Object getObject(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public <T> T getObject(int parameterIndex, Class<T> type) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public <T> T getObject(String parameterName, Class<T> type) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Object getObject(int i, Map<String, Class<?>> map) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Object getObject(String parameterName, Map<String, Class<?>> map) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  // Unsupported Ref/RowId/SQLXML/short/String OUT-parameter accessors.
  @Override
  public Ref getRef(int i) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public Ref getRef(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public RowId getRowId(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public RowId getRowId(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public SQLXML getSQLXML(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public SQLXML getSQLXML(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public short getShort(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public short getShort(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public String getString(int parameterIndex) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
  @Override
  public String getString(String parameterName) throws SQLException {
    throw new SQLFeatureNotSupportedException("Method not supported");
  }
@Override
public Time getTime(int parameterIndex) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public Time getTime(String parameterName) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public Time getTime(int parameterIndex, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public Time getTime(String parameterName, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public Timestamp getTimestamp(int parameterIndex) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public Timestamp getTimestamp(String parameterName) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public Timestamp getTimestamp(int parameterIndex, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public Timestamp getTimestamp(String parameterName, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public URL getURL(int parameterIndex) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public URL getURL(String parameterName) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void registerOutParameter(int parameterIndex, int sqlType) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void registerOutParameter(String parameterName, int sqlType) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void registerOutParameter(int parameterIndex, int sqlType, int scale) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void registerOutParameter(int paramIndex, int sqlType, String typeName) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void registerOutParameter(String parameterName, int sqlType, int scale) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void registerOutParameter(String parameterName, int sqlType, String typeName) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// ---------------------------------------------------------------------------
// CallableStatement named-parameter setters (set*(String, ...)).
// Named parameters are not supported by this driver; every overload throws
// SQLFeatureNotSupportedException. wasNull() at the end of this section is
// also unsupported because no OUT parameter can ever have been read.
// ---------------------------------------------------------------------------
@Override
public void setAsciiStream(String parameterName, InputStream x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setAsciiStream(String parameterName, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setAsciiStream(String parameterName, InputStream x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBigDecimal(String parameterName, BigDecimal x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBinaryStream(String parameterName, InputStream x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBinaryStream(String parameterName, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBinaryStream(String parameterName, InputStream x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBlob(String parameterName, Blob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBlob(String parameterName, InputStream inputStream) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBlob(String parameterName, InputStream inputStream, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBoolean(String parameterName, boolean x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setByte(String parameterName, byte x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBytes(String parameterName, byte[] x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setCharacterStream(String parameterName, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setCharacterStream(String parameterName, Reader reader, int length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setCharacterStream(String parameterName, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setClob(String parameterName, Clob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setClob(String parameterName, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setClob(String parameterName, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setDate(String parameterName, Date x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setDate(String parameterName, Date x, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setDouble(String parameterName, double x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setFloat(String parameterName, float x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setInt(String parameterName, int x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setLong(String parameterName, long x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNCharacterStream(String parameterName, Reader value) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNCharacterStream(String parameterName, Reader value, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNClob(String parameterName, NClob value) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNClob(String parameterName, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNClob(String parameterName, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNString(String parameterName, String value) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNull(String parameterName, int sqlType) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNull(String parameterName, int sqlType, String typeName) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setObject(String parameterName, Object x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setObject(String parameterName, Object x, int targetSqlType) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setObject(String parameterName, Object x, int targetSqlType, int scale) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setRowId(String parameterName, RowId x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setSQLXML(String parameterName, SQLXML xmlObject) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setShort(String parameterName, short x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setString(String parameterName, String x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTime(String parameterName, Time x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTime(String parameterName, Time x, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTimestamp(String parameterName, Timestamp x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTimestamp(String parameterName, Timestamp x, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setURL(String parameterName, URL val) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// wasNull: unsupported — no OUT parameter can be fetched, so there is no
// "last value read" to report on.
@Override
public boolean wasNull() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// ---------------------------------------------------------------------------
// PreparedStatement execution methods. executeQuery() is the ONLY execution
// path this class supports; batching, updates, and metadata are stubs.
// ---------------------------------------------------------------------------
@Override
public void addBatch() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void clearParameters() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean execute() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
/**
 * Executes the statement's query and returns a result set backed by this
 * statement. Builds a fresh HiveQueryResultSet over {@code this} on every
 * call; no caching is done here.
 */
@Override
public ResultSet executeQuery() throws SQLException {
return new HiveQueryResultSet.Builder(this).build();
}
@Override
public int executeUpdate() throws SQLException {
// Hive queries are read-only through this path; updates are unsupported.
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public ResultSetMetaData getMetaData() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public ParameterMetaData getParameterMetaData() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// ---------------------------------------------------------------------------
// PreparedStatement positional-parameter setters (set*(int, ...)).
// Parameter binding is not implemented; every overload throws
// SQLFeatureNotSupportedException. Callers must inline literal values into
// the SQL text instead of using '?' placeholders.
// ---------------------------------------------------------------------------
@Override
public void setArray(int i, Array x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setAsciiStream(int arg0, InputStream arg1) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setAsciiStream(int parameterIndex, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setAsciiStream(int arg0, InputStream arg1, long arg2) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBinaryStream(int parameterIndex, InputStream x, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBlob(int i, Blob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBlob(int parameterIndex, InputStream inputStream, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBoolean(int parameterIndex, boolean x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setByte(int parameterIndex, byte x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setBytes(int parameterIndex, byte[] x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader, int length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setCharacterStream(int parameterIndex, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setClob(int i, Clob x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setClob(int parameterIndex, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setClob(int parameterIndex, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setDate(int parameterIndex, Date x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setDouble(int parameterIndex, double x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setFloat(int parameterIndex, float x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setInt(int parameterIndex, int x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setLong(int parameterIndex, long x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNCharacterStream(int parameterIndex, Reader value) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNCharacterStream(int parameterIndex, Reader value, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNClob(int parameterIndex, NClob value) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNClob(int parameterIndex, Reader reader) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNClob(int parameterIndex, Reader reader, long length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNString(int parameterIndex, String value) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNull(int parameterIndex, int sqlType) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setNull(int paramIndex, int sqlType, String typeName) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setObject(int parameterIndex, Object x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setObject(int parameterIndex, Object x, int targetSqlType, int scale) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setRef(int i, Ref x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setRowId(int parameterIndex, RowId x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setShort(int parameterIndex, short x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setString(int parameterIndex, String x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTime(int parameterIndex, Time x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setURL(int parameterIndex, URL x) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// setUnicodeStream is deprecated in JDBC itself; kept only to satisfy the
// PreparedStatement interface.
@Override
public void setUnicodeStream(int parameterIndex, InputStream x, int length) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// ---------------------------------------------------------------------------
// java.sql.Statement methods taking raw SQL strings, plus lifecycle methods.
// All unsupported except getConnection(), which returns the owning
// connection captured at construction time.
// ---------------------------------------------------------------------------
@Override
public void addBatch(String sql) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void cancel() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void clearBatch() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void clearWarnings() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// NOTE(review): throwing from close() makes this statement unusable with
// try-with-resources and violates the usual JDBC expectation that close()
// is safe to call — confirm whether a no-op close() was intended.
@Override
public void close() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void closeOnCompletion() throws SQLException {
// JDK 1.7
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean isCloseOnCompletion() throws SQLException {
// JDK 1.7
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean execute(String sql) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean execute(String sql, int autoGeneratedKeys) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean execute(String sql, int[] columnIndexes) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean execute(String sql, String[] columnNames) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int[] executeBatch() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public ResultSet executeQuery(String sql) throws SQLException {
// Only the no-arg executeQuery() (the prepared form) is supported.
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int executeUpdate(String sql) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int executeUpdate(String sql, int[] columnIndexes) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int executeUpdate(String sql, String[] columnNames) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
/** Returns the Connection that produced this statement. */
@Override
public Connection getConnection() throws SQLException {
return this.connection;
}
// ---------------------------------------------------------------------------
// Remaining java.sql.Statement configuration/introspection methods and the
// java.sql.Wrapper interface — all unsupported stubs.
// NOTE(review): isClosed()/getWarnings() throwing (rather than returning a
// value) can surprise connection pools that probe statement state; confirm
// this is acceptable for the intended callers.
// ---------------------------------------------------------------------------
@Override
public int getFetchDirection() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getFetchSize() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public ResultSet getGeneratedKeys() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getMaxFieldSize() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getMaxRows() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean getMoreResults() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean getMoreResults(int current) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getQueryTimeout() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public ResultSet getResultSet() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getResultSetConcurrency() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getResultSetHoldability() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getResultSetType() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public int getUpdateCount() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public SQLWarning getWarnings() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean isClosed() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public boolean isPoolable() throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setCursorName(String name) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setEscapeProcessing(boolean enable) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setFetchDirection(int direction) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setFetchSize(int rows) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setMaxFieldSize(int max) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setMaxRows(int max) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setPoolable(boolean arg0) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public void setQueryTimeout(int seconds) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
// java.sql.Wrapper: no delegate types are exposed, so unwrapping is refused.
@Override
public boolean isWrapperFor(Class<?> iface) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
@Override
public <T> T unwrap(Class<T> iface) throws SQLException {
throw new SQLFeatureNotSupportedException("Method not supported");
}
}
|
googleapis/google-cloud-java | 36,784 | java-resourcemanager/proto-google-cloud-resourcemanager-v3/src/main/java/com/google/cloud/resourcemanager/v3/ListTagKeysResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/resourcemanager/v3/tag_keys.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.resourcemanager.v3;
/**
*
*
* <pre>
* The ListTagKeys response message.
* </pre>
*
* Protobuf type {@code google.cloud.resourcemanager.v3.ListTagKeysResponse}
*/
public final class ListTagKeysResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.resourcemanager.v3.ListTagKeysResponse)
ListTagKeysResponseOrBuilder {
// NOTE(review): this file is protoc-GENERATED ("DO NOT EDIT" above) — any
// hand edit here will be lost on regeneration; change the .proto instead.
private static final long serialVersionUID = 0L;
// Use ListTagKeysResponse.newBuilder() to construct.
private ListTagKeysResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default instance: empty key list, empty page token.
private ListTagKeysResponse() {
tagKeys_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListTagKeysResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.resourcemanager.v3.TagKeysProto
.internal_static_google_cloud_resourcemanager_v3_ListTagKeysResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.resourcemanager.v3.TagKeysProto
.internal_static_google_cloud_resourcemanager_v3_ListTagKeysResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.resourcemanager.v3.ListTagKeysResponse.class,
com.google.cloud.resourcemanager.v3.ListTagKeysResponse.Builder.class);
}
public static final int TAG_KEYS_FIELD_NUMBER = 1;
// Immutable once built; generated accessors below expose it read-only.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.resourcemanager.v3.TagKey> tagKeys_;
/**
 *
 *
 * <pre>
 * List of TagKeys that live under the specified parent in the request.
 * </pre>
 *
 * <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.resourcemanager.v3.TagKey> getTagKeysList() {
return tagKeys_;
}
/**
 *
 *
 * <pre>
 * List of TagKeys that live under the specified parent in the request.
 * </pre>
 *
 * <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.resourcemanager.v3.TagKeyOrBuilder>
getTagKeysOrBuilderList() {
return tagKeys_;
}
/**
 *
 *
 * <pre>
 * List of TagKeys that live under the specified parent in the request.
 * </pre>
 *
 * <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
 */
@java.lang.Override
public int getTagKeysCount() {
return tagKeys_.size();
}
/**
 *
 *
 * <pre>
 * List of TagKeys that live under the specified parent in the request.
 * </pre>
 *
 * <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
 */
@java.lang.Override
public com.google.cloud.resourcemanager.v3.TagKey getTagKeys(int index) {
return tagKeys_.get(index);
}
/**
 *
 *
 * <pre>
 * List of TagKeys that live under the specified parent in the request.
 * </pre>
 *
 * <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
 */
@java.lang.Override
public com.google.cloud.resourcemanager.v3.TagKeyOrBuilder getTagKeysOrBuilder(int index) {
return tagKeys_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; the getters below lazily convert
// and cache the other representation (standard protobuf lazy-string idiom).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A pagination token returned from a previous call to `ListTagKeys`
 * that indicates from where listing should continue.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access after parsing: decode the UTF-8 bytes once and
// cache the result so later calls are allocation-free.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * A pagination token returned from a previous call to `ListTagKeys`
 * that indicates from where listing should continue.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
// Mirror of getNextPageToken(): encode once, cache the ByteString.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// -1 = not computed, 0 = not initialized, 1 = initialized (memoized).
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields in this message, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
/** Serializes tag_keys (field 1), next_page_token (field 2), then any unknown fields. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < tagKeys_.size(); i++) {
output.writeMessage(1, tagKeys_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
/** Computes (and memoizes) the wire size; must mirror writeTo() exactly. */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < tagKeys_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tagKeys_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.resourcemanager.v3.ListTagKeysResponse)) {
return super.equals(obj);
}
com.google.cloud.resourcemanager.v3.ListTagKeysResponse other =
(com.google.cloud.resourcemanager.v3.ListTagKeysResponse) obj;
// Value equality over both declared fields plus the unknown-field set.
if (!getTagKeysList().equals(other.getTagKeysList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode caches the result; 0 means "not yet computed".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// The 41/19/37/53/29 multipliers follow the protobuf generated-code hashing scheme.
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Repeated field is folded in only when non-empty.
if (getTagKeysCount() > 0) {
hash = (37 * hash) + TAG_KEYS_FIELD_NUMBER;
hash = (53 * hash) + getTagKeysList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads. All delegate either to
// PARSER directly (in-memory inputs) or to the GeneratedMessageV3 stream helpers,
// which translate IO failures into the appropriate exception type.
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods: fresh builders come from the default instance; toBuilder()
// seeds a builder with this message's current field values.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.resourcemanager.v3.ListTagKeysResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; anything else is merged in.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The ListTagKeys response message.
* </pre>
*
* Protobuf type {@code google.cloud.resourcemanager.v3.ListTagKeysResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.resourcemanager.v3.ListTagKeysResponse)
com.google.cloud.resourcemanager.v3.ListTagKeysResponseOrBuilder {
// Generated builder for ListTagKeysResponse. In bitField0_, bit 0x00000001 marks
// tagKeys_ as a privately-owned mutable list (see ensureTagKeysIsMutable) and bit
// 0x00000002 marks nextPageToken_ as explicitly set.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.resourcemanager.v3.TagKeysProto
.internal_static_google_cloud_resourcemanager_v3_ListTagKeysResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.resourcemanager.v3.TagKeysProto
.internal_static_google_cloud_resourcemanager_v3_ListTagKeysResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.resourcemanager.v3.ListTagKeysResponse.class,
com.google.cloud.resourcemanager.v3.ListTagKeysResponse.Builder.class);
}
// Construct using com.google.cloud.resourcemanager.v3.ListTagKeysResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets both fields to their defaults and clears the bitfield.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (tagKeysBuilder_ == null) {
tagKeys_ = java.util.Collections.emptyList();
} else {
tagKeys_ = null;
tagKeysBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.resourcemanager.v3.TagKeysProto
.internal_static_google_cloud_resourcemanager_v3_ListTagKeysResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.resourcemanager.v3.ListTagKeysResponse getDefaultInstanceForType() {
return com.google.cloud.resourcemanager.v3.ListTagKeysResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.resourcemanager.v3.ListTagKeysResponse build() {
com.google.cloud.resourcemanager.v3.ListTagKeysResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.resourcemanager.v3.ListTagKeysResponse buildPartial() {
com.google.cloud.resourcemanager.v3.ListTagKeysResponse result =
new com.google.cloud.resourcemanager.v3.ListTagKeysResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated field, freezing a locally-owned list as unmodifiable.
private void buildPartialRepeatedFields(
com.google.cloud.resourcemanager.v3.ListTagKeysResponse result) {
if (tagKeysBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
tagKeys_ = java.util.Collections.unmodifiableList(tagKeys_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tagKeys_ = tagKeys_;
} else {
result.tagKeys_ = tagKeysBuilder_.build();
}
}
// Copies singular fields whose "has been set" bit is on.
private void buildPartial0(com.google.cloud.resourcemanager.v3.ListTagKeysResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
// The following overrides simply delegate to the GeneratedMessageV3.Builder
// reflection-based implementations.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.resourcemanager.v3.ListTagKeysResponse) {
return mergeFrom((com.google.cloud.resourcemanager.v3.ListTagKeysResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: repeated tag_keys are appended; a non-empty
// next_page_token in "other" overwrites the local value.
public Builder mergeFrom(com.google.cloud.resourcemanager.v3.ListTagKeysResponse other) {
if (other == com.google.cloud.resourcemanager.v3.ListTagKeysResponse.getDefaultInstance())
return this;
if (tagKeysBuilder_ == null) {
if (!other.tagKeys_.isEmpty()) {
if (tagKeys_.isEmpty()) {
tagKeys_ = other.tagKeys_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTagKeysIsMutable();
tagKeys_.addAll(other.tagKeys_);
}
onChanged();
}
} else {
if (!other.tagKeys_.isEmpty()) {
if (tagKeysBuilder_.isEmpty()) {
tagKeysBuilder_.dispose();
tagKeysBuilder_ = null;
tagKeys_ = other.tagKeys_;
bitField0_ = (bitField0_ & ~0x00000001);
tagKeysBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getTagKeysFieldBuilder()
: null;
} else {
tagKeysBuilder_.addAllMessages(other.tagKeys_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: dispatches on field tags (10 = field 1 message,
// 18 = field 2 string); unrecognized tags go to the unknown-field set.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.resourcemanager.v3.TagKey m =
input.readMessage(
com.google.cloud.resourcemanager.v3.TagKey.parser(), extensionRegistry);
if (tagKeysBuilder_ == null) {
ensureTagKeysIsMutable();
tagKeys_.add(m);
} else {
tagKeysBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.resourcemanager.v3.TagKey> tagKeys_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces a shared/immutable list with a private ArrayList
// before mutation, recording ownership in bit 0x00000001.
private void ensureTagKeysIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
tagKeys_ = new java.util.ArrayList<com.google.cloud.resourcemanager.v3.TagKey>(tagKeys_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.resourcemanager.v3.TagKey,
com.google.cloud.resourcemanager.v3.TagKey.Builder,
com.google.cloud.resourcemanager.v3.TagKeyOrBuilder>
tagKeysBuilder_;
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public java.util.List<com.google.cloud.resourcemanager.v3.TagKey> getTagKeysList() {
if (tagKeysBuilder_ == null) {
return java.util.Collections.unmodifiableList(tagKeys_);
} else {
return tagKeysBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public int getTagKeysCount() {
if (tagKeysBuilder_ == null) {
return tagKeys_.size();
} else {
return tagKeysBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public com.google.cloud.resourcemanager.v3.TagKey getTagKeys(int index) {
if (tagKeysBuilder_ == null) {
return tagKeys_.get(index);
} else {
return tagKeysBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder setTagKeys(int index, com.google.cloud.resourcemanager.v3.TagKey value) {
if (tagKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTagKeysIsMutable();
tagKeys_.set(index, value);
onChanged();
} else {
tagKeysBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder setTagKeys(
int index, com.google.cloud.resourcemanager.v3.TagKey.Builder builderForValue) {
if (tagKeysBuilder_ == null) {
ensureTagKeysIsMutable();
tagKeys_.set(index, builderForValue.build());
onChanged();
} else {
tagKeysBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder addTagKeys(com.google.cloud.resourcemanager.v3.TagKey value) {
if (tagKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTagKeysIsMutable();
tagKeys_.add(value);
onChanged();
} else {
tagKeysBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder addTagKeys(int index, com.google.cloud.resourcemanager.v3.TagKey value) {
if (tagKeysBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTagKeysIsMutable();
tagKeys_.add(index, value);
onChanged();
} else {
tagKeysBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder addTagKeys(com.google.cloud.resourcemanager.v3.TagKey.Builder builderForValue) {
if (tagKeysBuilder_ == null) {
ensureTagKeysIsMutable();
tagKeys_.add(builderForValue.build());
onChanged();
} else {
tagKeysBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder addTagKeys(
int index, com.google.cloud.resourcemanager.v3.TagKey.Builder builderForValue) {
if (tagKeysBuilder_ == null) {
ensureTagKeysIsMutable();
tagKeys_.add(index, builderForValue.build());
onChanged();
} else {
tagKeysBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder addAllTagKeys(
java.lang.Iterable<? extends com.google.cloud.resourcemanager.v3.TagKey> values) {
if (tagKeysBuilder_ == null) {
ensureTagKeysIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tagKeys_);
onChanged();
} else {
tagKeysBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder clearTagKeys() {
if (tagKeysBuilder_ == null) {
tagKeys_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
tagKeysBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public Builder removeTagKeys(int index) {
if (tagKeysBuilder_ == null) {
ensureTagKeysIsMutable();
tagKeys_.remove(index);
onChanged();
} else {
tagKeysBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public com.google.cloud.resourcemanager.v3.TagKey.Builder getTagKeysBuilder(int index) {
return getTagKeysFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public com.google.cloud.resourcemanager.v3.TagKeyOrBuilder getTagKeysOrBuilder(int index) {
if (tagKeysBuilder_ == null) {
return tagKeys_.get(index);
} else {
return tagKeysBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public java.util.List<? extends com.google.cloud.resourcemanager.v3.TagKeyOrBuilder>
getTagKeysOrBuilderList() {
if (tagKeysBuilder_ != null) {
return tagKeysBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(tagKeys_);
}
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public com.google.cloud.resourcemanager.v3.TagKey.Builder addTagKeysBuilder() {
return getTagKeysFieldBuilder()
.addBuilder(com.google.cloud.resourcemanager.v3.TagKey.getDefaultInstance());
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public com.google.cloud.resourcemanager.v3.TagKey.Builder addTagKeysBuilder(int index) {
return getTagKeysFieldBuilder()
.addBuilder(index, com.google.cloud.resourcemanager.v3.TagKey.getDefaultInstance());
}
/**
*
*
* <pre>
* List of TagKeys that live under the specified parent in the request.
* </pre>
*
* <code>repeated .google.cloud.resourcemanager.v3.TagKey tag_keys = 1;</code>
*/
public java.util.List<com.google.cloud.resourcemanager.v3.TagKey.Builder>
getTagKeysBuilderList() {
return getTagKeysFieldBuilder().getBuilderList();
}
// Lazily switches the repeated field from plain-list mode to
// RepeatedFieldBuilderV3 mode; after this, tagKeys_ is owned by the builder.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.resourcemanager.v3.TagKey,
com.google.cloud.resourcemanager.v3.TagKey.Builder,
com.google.cloud.resourcemanager.v3.TagKeyOrBuilder>
getTagKeysFieldBuilder() {
if (tagKeysBuilder_ == null) {
tagKeysBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.resourcemanager.v3.TagKey,
com.google.cloud.resourcemanager.v3.TagKey.Builder,
com.google.cloud.resourcemanager.v3.TagKeyOrBuilder>(
tagKeys_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
tagKeys_ = null;
}
return tagKeysBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A pagination token returned from a previous call to `ListTagKeys`
* that indicates from where listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to `ListTagKeys`
* that indicates from where listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to `ListTagKeys`
* that indicates from where listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to `ListTagKeys`
* that indicates from where listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to `ListTagKeys`
* that indicates from where listing should continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.resourcemanager.v3.ListTagKeysResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.resourcemanager.v3.ListTagKeysResponse)
// Singleton default instance: the canonical empty ListTagKeysResponse.
private static final com.google.cloud.resourcemanager.v3.ListTagKeysResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.resourcemanager.v3.ListTagKeysResponse();
}
public static com.google.cloud.resourcemanager.v3.ListTagKeysResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser implementation: parses by running the wire format through a fresh Builder,
// attaching the partially-built message to any parse exception.
private static final com.google.protobuf.Parser<ListTagKeysResponse> PARSER =
new com.google.protobuf.AbstractParser<ListTagKeysResponse>() {
@java.lang.Override
public ListTagKeysResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListTagKeysResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListTagKeysResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.resourcemanager.v3.ListTagKeysResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== chunk-concatenation artifact: the content below belongs to a different generated file,
// googleapis/google-cloud-java:
// java-cloudsupport/proto-google-cloud-cloudsupport-v2beta/src/main/java/com/google/cloud/support/v2beta/ShowFeedResponse.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/support/v2beta/feed_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.support.v2beta;
/**
*
*
* <pre>
* The response message for the ShowFeed endpoint.
* </pre>
*
* Protobuf type {@code google.cloud.support.v2beta.ShowFeedResponse}
*/
public final class ShowFeedResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.support.v2beta.ShowFeedResponse)
ShowFeedResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ShowFeedResponse.newBuilder() to construct.
private ShowFeedResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Default constructor initializes fields to their proto3 defaults
// (empty list / empty string).
private ShowFeedResponse() {
feedItems_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ShowFeedResponse();
}
// Descriptor/accessor-table plumbing generated from feed_service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.support.v2beta.FeedServiceProto
.internal_static_google_cloud_support_v2beta_ShowFeedResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.support.v2beta.FeedServiceProto
.internal_static_google_cloud_support_v2beta_ShowFeedResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.support.v2beta.ShowFeedResponse.class,
com.google.cloud.support.v2beta.ShowFeedResponse.Builder.class);
}
public static final int FEED_ITEMS_FIELD_NUMBER = 1;
// Immutable snapshot of repeated field 1; the accessors below expose it directly.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.support.v2beta.FeedItem> feedItems_;
/**
*
*
* <pre>
* The list of feed items associated with the given Case.
* </pre>
*
* <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.support.v2beta.FeedItem> getFeedItemsList() {
return feedItems_;
}
/**
*
*
* <pre>
* The list of feed items associated with the given Case.
* </pre>
*
* <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.support.v2beta.FeedItemOrBuilder>
getFeedItemsOrBuilderList() {
return feedItems_;
}
/**
*
*
* <pre>
* The list of feed items associated with the given Case.
* </pre>
*
* <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
*/
@java.lang.Override
public int getFeedItemsCount() {
return feedItems_.size();
}
/**
*
*
* <pre>
* The list of feed items associated with the given Case.
* </pre>
*
* <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
*/
@java.lang.Override
public com.google.cloud.support.v2beta.FeedItem getFeedItems(int index) {
return feedItems_.get(index);
}
/**
*
*
* <pre>
* The list of feed items associated with the given Case.
* </pre>
*
* <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
*/
@java.lang.Override
public com.google.cloud.support.v2beta.FeedItemOrBuilder getFeedItemsOrBuilder(int index) {
return feedItems_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; converted lazily by the getters below.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve the next page of results. This should be set in the
* `page_token` field of subsequent `ShowFeedRequests`.
* If unspecified, there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First String access: decode the cached ByteString and memoize the result.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve the next page of results. This should be set in the
* `page_token` field of subsequent `ShowFeedRequests`.
* If unspecified, there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Cached initialization state: -1 = not yet checked, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Field 1: repeated feed_items messages.
for (int i = 0; i < feedItems_.size(); i++) {
output.writeMessage(1, feedItems_.get(i));
}
// Field 2: next_page_token, serialized only when non-empty.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize caches the result; -1 means "not yet computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < feedItems_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, feedItems_.get(i));
}
// next_page_token contributes only when non-empty, mirroring writeTo.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.support.v2beta.ShowFeedResponse)) {
return super.equals(obj);
}
com.google.cloud.support.v2beta.ShowFeedResponse other =
(com.google.cloud.support.v2beta.ShowFeedResponse) obj;
// Value equality over both declared fields plus the unknown-field set.
if (!getFeedItemsList().equals(other.getFeedItemsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode caches the result; 0 means "not yet computed".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
// The 41/19/37/53/29 multipliers follow the protobuf generated-code hashing scheme.
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Repeated field is folded in only when non-empty.
if (getFeedItemsCount() > 0) {
hash = (37 * hash) + FEED_ITEMS_FIELD_NUMBER;
hash = (53 * hash) + getFeedItemsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points: byte-oriented overloads delegate directly to
  // PARSER; stream-oriented overloads go through the GeneratedMessageV3 helpers, which
  // translate IOExceptions consistently.
  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // "Delimited" variants read a varint length prefix first (for streaming multiple messages).
  public static com.google.cloud.support.v2beta.ShowFeedResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.support.v2beta.ShowFeedResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * The response message for the ShowFeed endpoint.
   * </pre>
   *
   * Protobuf type {@code google.cloud.support.v2beta.ShowFeedResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.support.v2beta.ShowFeedResponse)
      com.google.cloud.support.v2beta.ShowFeedResponseOrBuilder {
    // Static descriptor accessor used by reflection-based code paths.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.support.v2beta.FeedServiceProto
          .internal_static_google_cloud_support_v2beta_ShowFeedResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.support.v2beta.FeedServiceProto
          .internal_static_google_cloud_support_v2beta_ShowFeedResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.support.v2beta.ShowFeedResponse.class,
              com.google.cloud.support.v2beta.ShowFeedResponse.Builder.class);
    }

    // Construct using com.google.cloud.support.v2beta.ShowFeedResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Reset the repeated field either locally or through its field builder,
      // depending on which representation is currently active.
      if (feedItemsBuilder_ == null) {
        feedItems_ = java.util.Collections.emptyList();
      } else {
        feedItems_ = null;
        feedItemsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.support.v2beta.FeedServiceProto
          .internal_static_google_cloud_support_v2beta_ShowFeedResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.support.v2beta.ShowFeedResponse getDefaultInstanceForType() {
      return com.google.cloud.support.v2beta.ShowFeedResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.support.v2beta.ShowFeedResponse build() {
      com.google.cloud.support.v2beta.ShowFeedResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.support.v2beta.ShowFeedResponse buildPartial() {
      com.google.cloud.support.v2beta.ShowFeedResponse result =
          new com.google.cloud.support.v2beta.ShowFeedResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated feed_items field into the result, freezing the local
    // list (making it unmodifiable) so the built message is immutable.
    private void buildPartialRepeatedFields(
        com.google.cloud.support.v2beta.ShowFeedResponse result) {
      if (feedItemsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          feedItems_ = java.util.Collections.unmodifiableList(feedItems_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.feedItems_ = feedItems_;
      } else {
        result.feedItems_ = feedItemsBuilder_.build();
      }
    }

    // Transfers singular fields gated by bitField0_ (bit 0x2 = next_page_token set).
    private void buildPartial0(com.google.cloud.support.v2beta.ShowFeedResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Reflection-style field mutators: pure pass-throughs to the base builder,
    // overridden only to narrow the return type to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.support.v2beta.ShowFeedResponse) {
        return mergeFrom((com.google.cloud.support.v2beta.ShowFeedResponse) other);
      } else {
        // Fall back to the reflective merge for foreign message types.
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.support.v2beta.ShowFeedResponse other) {
      if (other == com.google.cloud.support.v2beta.ShowFeedResponse.getDefaultInstance())
        return this;
      if (feedItemsBuilder_ == null) {
        if (!other.feedItems_.isEmpty()) {
          if (feedItems_.isEmpty()) {
            // Adopt the other message's (immutable) list directly; clear the
            // "mutable" bit so a later mutation triggers a defensive copy.
            feedItems_ = other.feedItems_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureFeedItemsIsMutable();
            feedItems_.addAll(other.feedItems_);
          }
          onChanged();
        }
      } else {
        if (!other.feedItems_.isEmpty()) {
          if (feedItemsBuilder_.isEmpty()) {
            // Dispose the empty field builder and adopt the other list; recreate
            // the builder only when alwaysUseFieldBuilders is enabled.
            feedItemsBuilder_.dispose();
            feedItemsBuilder_ = null;
            feedItems_ = other.feedItems_;
            bitField0_ = (bitField0_ & ~0x00000001);
            feedItemsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getFeedItemsFieldBuilder()
                    : null;
          } else {
            feedItemsBuilder_.addAllMessages(other.feedItems_);
          }
        }
      }
      // proto3 string merge semantics: a non-empty value overwrites ours.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tag = (field_number << 3) | wire_type: 10 = field 1 length-delimited,
          // 18 = field 2 length-delimited, 0 = end of stream.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.support.v2beta.FeedItem m =
                    input.readMessage(
                        com.google.cloud.support.v2beta.FeedItem.parser(), extensionRegistry);
                if (feedItemsBuilder_ == null) {
                  ensureFeedItemsIsMutable();
                  feedItems_.add(m);
                } else {
                  feedItemsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on partial parses.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x1 tracks whether feedItems_ is a private mutable copy;
    // bit 0x2 tracks whether next_page_token has been explicitly set.
    private int bitField0_;

    private java.util.List<com.google.cloud.support.v2beta.FeedItem> feedItems_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replaces the (possibly shared/immutable) list with a private
    // ArrayList before the first mutation.
    private void ensureFeedItemsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        feedItems_ = new java.util.ArrayList<com.google.cloud.support.v2beta.FeedItem>(feedItems_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily created nested-builder support; once non-null, it owns the list and
    // feedItems_ is set to null (see getFeedItemsFieldBuilder()).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.support.v2beta.FeedItem,
            com.google.cloud.support.v2beta.FeedItem.Builder,
            com.google.cloud.support.v2beta.FeedItemOrBuilder>
        feedItemsBuilder_;
    // Accessors/mutators for the repeated feed_items field. Every method follows the
    // same dual-representation pattern: operate on the local list when
    // feedItemsBuilder_ is null, otherwise delegate to the field builder.
    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public java.util.List<com.google.cloud.support.v2beta.FeedItem> getFeedItemsList() {
      if (feedItemsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(feedItems_);
      } else {
        return feedItemsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public int getFeedItemsCount() {
      if (feedItemsBuilder_ == null) {
        return feedItems_.size();
      } else {
        return feedItemsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public com.google.cloud.support.v2beta.FeedItem getFeedItems(int index) {
      if (feedItemsBuilder_ == null) {
        return feedItems_.get(index);
      } else {
        return feedItemsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder setFeedItems(int index, com.google.cloud.support.v2beta.FeedItem value) {
      if (feedItemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFeedItemsIsMutable();
        feedItems_.set(index, value);
        onChanged();
      } else {
        feedItemsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder setFeedItems(
        int index, com.google.cloud.support.v2beta.FeedItem.Builder builderForValue) {
      if (feedItemsBuilder_ == null) {
        ensureFeedItemsIsMutable();
        feedItems_.set(index, builderForValue.build());
        onChanged();
      } else {
        feedItemsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder addFeedItems(com.google.cloud.support.v2beta.FeedItem value) {
      if (feedItemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFeedItemsIsMutable();
        feedItems_.add(value);
        onChanged();
      } else {
        feedItemsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder addFeedItems(int index, com.google.cloud.support.v2beta.FeedItem value) {
      if (feedItemsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureFeedItemsIsMutable();
        feedItems_.add(index, value);
        onChanged();
      } else {
        feedItemsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder addFeedItems(com.google.cloud.support.v2beta.FeedItem.Builder builderForValue) {
      if (feedItemsBuilder_ == null) {
        ensureFeedItemsIsMutable();
        feedItems_.add(builderForValue.build());
        onChanged();
      } else {
        feedItemsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder addFeedItems(
        int index, com.google.cloud.support.v2beta.FeedItem.Builder builderForValue) {
      if (feedItemsBuilder_ == null) {
        ensureFeedItemsIsMutable();
        feedItems_.add(index, builderForValue.build());
        onChanged();
      } else {
        feedItemsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder addAllFeedItems(
        java.lang.Iterable<? extends com.google.cloud.support.v2beta.FeedItem> values) {
      if (feedItemsBuilder_ == null) {
        ensureFeedItemsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, feedItems_);
        onChanged();
      } else {
        feedItemsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder clearFeedItems() {
      if (feedItemsBuilder_ == null) {
        feedItems_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        feedItemsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public Builder removeFeedItems(int index) {
      if (feedItemsBuilder_ == null) {
        ensureFeedItemsIsMutable();
        feedItems_.remove(index);
        onChanged();
      } else {
        feedItemsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public com.google.cloud.support.v2beta.FeedItem.Builder getFeedItemsBuilder(int index) {
      return getFeedItemsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public com.google.cloud.support.v2beta.FeedItemOrBuilder getFeedItemsOrBuilder(int index) {
      if (feedItemsBuilder_ == null) {
        return feedItems_.get(index);
      } else {
        return feedItemsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.support.v2beta.FeedItemOrBuilder>
        getFeedItemsOrBuilderList() {
      if (feedItemsBuilder_ != null) {
        return feedItemsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(feedItems_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public com.google.cloud.support.v2beta.FeedItem.Builder addFeedItemsBuilder() {
      return getFeedItemsFieldBuilder()
          .addBuilder(com.google.cloud.support.v2beta.FeedItem.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public com.google.cloud.support.v2beta.FeedItem.Builder addFeedItemsBuilder(int index) {
      return getFeedItemsFieldBuilder()
          .addBuilder(index, com.google.cloud.support.v2beta.FeedItem.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of feed items associated with the given Case.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.FeedItem feed_items = 1;</code>
     */
    public java.util.List<com.google.cloud.support.v2beta.FeedItem.Builder>
        getFeedItemsBuilderList() {
      return getFeedItemsFieldBuilder().getBuilderList();
    }

    // Lazily creates the field builder; from then on it owns the element list
    // (feedItems_ is nulled out to avoid a stale duplicate reference).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.support.v2beta.FeedItem,
            com.google.cloud.support.v2beta.FeedItem.Builder,
            com.google.cloud.support.v2beta.FeedItemOrBuilder>
        getFeedItemsFieldBuilder() {
      if (feedItemsBuilder_ == null) {
        feedItemsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.support.v2beta.FeedItem,
                com.google.cloud.support.v2beta.FeedItem.Builder,
                com.google.cloud.support.v2beta.FeedItemOrBuilder>(
                feedItems_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        feedItems_ = null;
      }
      return feedItemsBuilder_;
    }
    // Stored as Object: either a String or a ByteString; lazily converted and
    // cached in whichever form was requested last (standard protobuf string idiom).
    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. This should be set in the
     * `page_token` field of subsequent `ShowFeedRequests`.
     * If unspecified, there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. This should be set in the
     * `page_token` field of subsequent `ShowFeedRequests`.
     * If unspecified, there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. This should be set in the
     * `page_token` field of subsequent `ShowFeedRequests`.
     * If unspecified, there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. This should be set in the
     * `page_token` field of subsequent `ShowFeedRequests`.
     * If unspecified, there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. This should be set in the
     * `page_token` field of subsequent `ShowFeedRequests`.
     * If unspecified, there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Rejects invalid UTF-8 up front so the String cache stays consistent.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field pass-throughs, overridden only to narrow the return type.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.support.v2beta.ShowFeedResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.support.v2beta.ShowFeedResponse)
  // Shared immutable default instance; also used as the identity check in toBuilder().
  private static final com.google.cloud.support.v2beta.ShowFeedResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.support.v2beta.ShowFeedResponse();
  }

  public static com.google.cloud.support.v2beta.ShowFeedResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implemented on top of the Builder's streaming mergeFrom; on failure it
  // attaches the partially built message to the thrown exception.
  private static final com.google.protobuf.Parser<ShowFeedResponse> PARSER =
      new com.google.protobuf.AbstractParser<ShowFeedResponse>() {
        @java.lang.Override
        public ShowFeedResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ShowFeedResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ShowFeedResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.support.v2beta.ShowFeedResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,069 | java-orgpolicy/google-cloud-orgpolicy/src/main/java/com/google/cloud/orgpolicy/v2/stub/OrgPolicyStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.orgpolicy.v2.stub;
import static com.google.cloud.orgpolicy.v2.OrgPolicyClient.ListConstraintsPagedResponse;
import static com.google.cloud.orgpolicy.v2.OrgPolicyClient.ListCustomConstraintsPagedResponse;
import static com.google.cloud.orgpolicy.v2.OrgPolicyClient.ListPoliciesPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.orgpolicy.v2.Constraint;
import com.google.cloud.orgpolicy.v2.CreateCustomConstraintRequest;
import com.google.cloud.orgpolicy.v2.CreatePolicyRequest;
import com.google.cloud.orgpolicy.v2.CustomConstraint;
import com.google.cloud.orgpolicy.v2.DeleteCustomConstraintRequest;
import com.google.cloud.orgpolicy.v2.DeletePolicyRequest;
import com.google.cloud.orgpolicy.v2.GetCustomConstraintRequest;
import com.google.cloud.orgpolicy.v2.GetEffectivePolicyRequest;
import com.google.cloud.orgpolicy.v2.GetPolicyRequest;
import com.google.cloud.orgpolicy.v2.ListConstraintsRequest;
import com.google.cloud.orgpolicy.v2.ListConstraintsResponse;
import com.google.cloud.orgpolicy.v2.ListCustomConstraintsRequest;
import com.google.cloud.orgpolicy.v2.ListCustomConstraintsResponse;
import com.google.cloud.orgpolicy.v2.ListPoliciesRequest;
import com.google.cloud.orgpolicy.v2.ListPoliciesResponse;
import com.google.cloud.orgpolicy.v2.Policy;
import com.google.cloud.orgpolicy.v2.UpdateCustomConstraintRequest;
import com.google.cloud.orgpolicy.v2.UpdatePolicyRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link OrgPolicyStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (orgpolicy.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getPolicy:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* OrgPolicyStubSettings.Builder orgPolicySettingsBuilder = OrgPolicyStubSettings.newBuilder();
* orgPolicySettingsBuilder
* .getPolicySettings()
* .setRetrySettings(
* orgPolicySettingsBuilder
* .getPolicySettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* OrgPolicyStubSettings orgPolicySettings = orgPolicySettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*/
@Generated("by gapic-generator-java")
public class OrgPolicyStubSettings extends StubSettings<OrgPolicyStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Per-RPC call settings (retry/timeout configuration), one per OrgPolicy method.
  // List* methods use PagedCallSettings to carry pagination descriptors as well.
  private final PagedCallSettings<
          ListConstraintsRequest, ListConstraintsResponse, ListConstraintsPagedResponse>
      listConstraintsSettings;
  private final PagedCallSettings<
          ListPoliciesRequest, ListPoliciesResponse, ListPoliciesPagedResponse>
      listPoliciesSettings;
  private final UnaryCallSettings<GetPolicyRequest, Policy> getPolicySettings;
  private final UnaryCallSettings<GetEffectivePolicyRequest, Policy> getEffectivePolicySettings;
  private final UnaryCallSettings<CreatePolicyRequest, Policy> createPolicySettings;
  private final UnaryCallSettings<UpdatePolicyRequest, Policy> updatePolicySettings;
  private final UnaryCallSettings<DeletePolicyRequest, Empty> deletePolicySettings;
  private final UnaryCallSettings<CreateCustomConstraintRequest, CustomConstraint>
      createCustomConstraintSettings;
  private final UnaryCallSettings<UpdateCustomConstraintRequest, CustomConstraint>
      updateCustomConstraintSettings;
  private final UnaryCallSettings<GetCustomConstraintRequest, CustomConstraint>
      getCustomConstraintSettings;
  private final PagedCallSettings<
          ListCustomConstraintsRequest,
          ListCustomConstraintsResponse,
          ListCustomConstraintsPagedResponse>
      listCustomConstraintsSettings;
  private final UnaryCallSettings<DeleteCustomConstraintRequest, Empty>
      deleteCustomConstraintSettings;
  // Teaches the gax pagination machinery how to thread page tokens/sizes through
  // ListConstraints requests and extract resources from responses.
  private static final PagedListDescriptor<
          ListConstraintsRequest, ListConstraintsResponse, Constraint>
      LIST_CONSTRAINTS_PAGE_STR_DESC =
          new PagedListDescriptor<ListConstraintsRequest, ListConstraintsResponse, Constraint>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListConstraintsRequest injectToken(
                ListConstraintsRequest payload, String token) {
              return ListConstraintsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListConstraintsRequest injectPageSize(
                ListConstraintsRequest payload, int pageSize) {
              return ListConstraintsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListConstraintsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListConstraintsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Constraint> extractResources(ListConstraintsResponse payload) {
              return payload.getConstraintsList();
            }
          };
// Page-stream descriptor for listPolicies: maps page token/size on the request and
// next-page token / Policy resources on the response (see LIST_CONSTRAINTS_PAGE_STR_DESC).
private static final PagedListDescriptor<ListPoliciesRequest, ListPoliciesResponse, Policy>
    LIST_POLICIES_PAGE_STR_DESC =
        new PagedListDescriptor<ListPoliciesRequest, ListPoliciesResponse, Policy>() {
          @Override
          public String emptyToken() {
            return "";
          }

          @Override
          public ListPoliciesRequest injectToken(ListPoliciesRequest payload, String token) {
            return ListPoliciesRequest.newBuilder(payload).setPageToken(token).build();
          }

          @Override
          public ListPoliciesRequest injectPageSize(ListPoliciesRequest payload, int pageSize) {
            return ListPoliciesRequest.newBuilder(payload).setPageSize(pageSize).build();
          }

          @Override
          public Integer extractPageSize(ListPoliciesRequest payload) {
            return payload.getPageSize();
          }

          @Override
          public String extractNextToken(ListPoliciesResponse payload) {
            return payload.getNextPageToken();
          }

          @Override
          public Iterable<Policy> extractResources(ListPoliciesResponse payload) {
            return payload.getPoliciesList();
          }
        };
// Page-stream descriptor for listCustomConstraints: maps page token/size on the request
// and next-page token / CustomConstraint resources on the response.
private static final PagedListDescriptor<
        ListCustomConstraintsRequest, ListCustomConstraintsResponse, CustomConstraint>
    LIST_CUSTOM_CONSTRAINTS_PAGE_STR_DESC =
        new PagedListDescriptor<
            ListCustomConstraintsRequest, ListCustomConstraintsResponse, CustomConstraint>() {
          @Override
          public String emptyToken() {
            return "";
          }

          @Override
          public ListCustomConstraintsRequest injectToken(
              ListCustomConstraintsRequest payload, String token) {
            return ListCustomConstraintsRequest.newBuilder(payload).setPageToken(token).build();
          }

          @Override
          public ListCustomConstraintsRequest injectPageSize(
              ListCustomConstraintsRequest payload, int pageSize) {
            return ListCustomConstraintsRequest.newBuilder(payload).setPageSize(pageSize).build();
          }

          @Override
          public Integer extractPageSize(ListCustomConstraintsRequest payload) {
            return payload.getPageSize();
          }

          @Override
          public String extractNextToken(ListCustomConstraintsResponse payload) {
            return payload.getNextPageToken();
          }

          @Override
          public Iterable<CustomConstraint> extractResources(
              ListCustomConstraintsResponse payload) {
            return payload.getCustomConstraintsList();
          }
        };
// Wraps the raw listConstraints callable into a future ListConstraintsPagedResponse,
// using the page descriptor above to drive pagination.
private static final PagedListResponseFactory<
        ListConstraintsRequest, ListConstraintsResponse, ListConstraintsPagedResponse>
    LIST_CONSTRAINTS_PAGE_STR_FACT =
        new PagedListResponseFactory<
            ListConstraintsRequest, ListConstraintsResponse, ListConstraintsPagedResponse>() {
          @Override
          public ApiFuture<ListConstraintsPagedResponse> getFuturePagedResponse(
              UnaryCallable<ListConstraintsRequest, ListConstraintsResponse> callable,
              ListConstraintsRequest request,
              ApiCallContext context,
              ApiFuture<ListConstraintsResponse> futureResponse) {
            PageContext<ListConstraintsRequest, ListConstraintsResponse, Constraint> pageContext =
                PageContext.create(callable, LIST_CONSTRAINTS_PAGE_STR_DESC, request, context);
            return ListConstraintsPagedResponse.createAsync(pageContext, futureResponse);
          }
        };
// Wraps the raw listPolicies callable into a future ListPoliciesPagedResponse.
private static final PagedListResponseFactory<
        ListPoliciesRequest, ListPoliciesResponse, ListPoliciesPagedResponse>
    LIST_POLICIES_PAGE_STR_FACT =
        new PagedListResponseFactory<
            ListPoliciesRequest, ListPoliciesResponse, ListPoliciesPagedResponse>() {
          @Override
          public ApiFuture<ListPoliciesPagedResponse> getFuturePagedResponse(
              UnaryCallable<ListPoliciesRequest, ListPoliciesResponse> callable,
              ListPoliciesRequest request,
              ApiCallContext context,
              ApiFuture<ListPoliciesResponse> futureResponse) {
            PageContext<ListPoliciesRequest, ListPoliciesResponse, Policy> pageContext =
                PageContext.create(callable, LIST_POLICIES_PAGE_STR_DESC, request, context);
            return ListPoliciesPagedResponse.createAsync(pageContext, futureResponse);
          }
        };
// Wraps the raw listCustomConstraints callable into a future
// ListCustomConstraintsPagedResponse.
private static final PagedListResponseFactory<
        ListCustomConstraintsRequest,
        ListCustomConstraintsResponse,
        ListCustomConstraintsPagedResponse>
    LIST_CUSTOM_CONSTRAINTS_PAGE_STR_FACT =
        new PagedListResponseFactory<
            ListCustomConstraintsRequest,
            ListCustomConstraintsResponse,
            ListCustomConstraintsPagedResponse>() {
          @Override
          public ApiFuture<ListCustomConstraintsPagedResponse> getFuturePagedResponse(
              UnaryCallable<ListCustomConstraintsRequest, ListCustomConstraintsResponse> callable,
              ListCustomConstraintsRequest request,
              ApiCallContext context,
              ApiFuture<ListCustomConstraintsResponse> futureResponse) {
            PageContext<
                    ListCustomConstraintsRequest, ListCustomConstraintsResponse, CustomConstraint>
                pageContext =
                    PageContext.create(
                        callable, LIST_CUSTOM_CONSTRAINTS_PAGE_STR_DESC, request, context);
            return ListCustomConstraintsPagedResponse.createAsync(pageContext, futureResponse);
          }
        };
// ---------------------------------------------------------------------------
// Per-method settings accessors. Each returns the immutable settings object
// assigned in the constructor from the corresponding Builder field.
// ---------------------------------------------------------------------------

/** Returns the object with the settings used for calls to listConstraints. */
public PagedCallSettings<
        ListConstraintsRequest, ListConstraintsResponse, ListConstraintsPagedResponse>
    listConstraintsSettings() {
  return listConstraintsSettings;
}

/** Returns the object with the settings used for calls to listPolicies. */
public PagedCallSettings<ListPoliciesRequest, ListPoliciesResponse, ListPoliciesPagedResponse>
    listPoliciesSettings() {
  return listPoliciesSettings;
}

/** Returns the object with the settings used for calls to getPolicy. */
public UnaryCallSettings<GetPolicyRequest, Policy> getPolicySettings() {
  return getPolicySettings;
}

/** Returns the object with the settings used for calls to getEffectivePolicy. */
public UnaryCallSettings<GetEffectivePolicyRequest, Policy> getEffectivePolicySettings() {
  return getEffectivePolicySettings;
}

/** Returns the object with the settings used for calls to createPolicy. */
public UnaryCallSettings<CreatePolicyRequest, Policy> createPolicySettings() {
  return createPolicySettings;
}

/** Returns the object with the settings used for calls to updatePolicy. */
public UnaryCallSettings<UpdatePolicyRequest, Policy> updatePolicySettings() {
  return updatePolicySettings;
}

/** Returns the object with the settings used for calls to deletePolicy. */
public UnaryCallSettings<DeletePolicyRequest, Empty> deletePolicySettings() {
  return deletePolicySettings;
}

/** Returns the object with the settings used for calls to createCustomConstraint. */
public UnaryCallSettings<CreateCustomConstraintRequest, CustomConstraint>
    createCustomConstraintSettings() {
  return createCustomConstraintSettings;
}

/** Returns the object with the settings used for calls to updateCustomConstraint. */
public UnaryCallSettings<UpdateCustomConstraintRequest, CustomConstraint>
    updateCustomConstraintSettings() {
  return updateCustomConstraintSettings;
}

/** Returns the object with the settings used for calls to getCustomConstraint. */
public UnaryCallSettings<GetCustomConstraintRequest, CustomConstraint>
    getCustomConstraintSettings() {
  return getCustomConstraintSettings;
}

/** Returns the object with the settings used for calls to listCustomConstraints. */
public PagedCallSettings<
        ListCustomConstraintsRequest,
        ListCustomConstraintsResponse,
        ListCustomConstraintsPagedResponse>
    listCustomConstraintsSettings() {
  return listCustomConstraintsSettings;
}

/** Returns the object with the settings used for calls to deleteCustomConstraint. */
public UnaryCallSettings<DeleteCustomConstraintRequest, Empty> deleteCustomConstraintSettings() {
  return deleteCustomConstraintSettings;
}
/**
 * Instantiates the transport-specific stub (gRPC or HTTP/JSON) matching the configured
 * transport channel provider.
 *
 * @throws IOException if the stub cannot be created
 * @throws UnsupportedOperationException if the configured transport is neither gRPC nor REST
 */
public OrgPolicyStub createStub() throws IOException {
  String transportName = getTransportChannelProvider().getTransportName();
  if (transportName.equals(GrpcTransportChannel.getGrpcTransportName())) {
    return GrpcOrgPolicyStub.create(this);
  }
  if (transportName.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
    return HttpJsonOrgPolicyStub.create(this);
  }
  throw new UnsupportedOperationException(
      String.format("Transport not supported: %s", transportName));
}
// ---------------------------------------------------------------------------
// Static defaults (endpoints, scopes, providers) and builder entry points.
// ---------------------------------------------------------------------------

/** Returns the default service name. */
@Override
public String getServiceName() {
  return "orgpolicy";
}

/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
  return InstantiatingExecutorProvider.newBuilder();
}

/** Returns the default service endpoint. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
  return "orgpolicy.googleapis.com:443";
}

/** Returns the default mTLS service endpoint. */
public static String getDefaultMtlsEndpoint() {
  return "orgpolicy.mtls.googleapis.com:443";
}

/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
  return DEFAULT_SERVICE_SCOPES;
}

/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
  return GoogleCredentialsProvider.newBuilder()
      .setScopesToApply(DEFAULT_SERVICE_SCOPES)
      .setUseJwtAccessWithScope(true);
}

/** Returns a builder for the default gRPC ChannelProvider for this service. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
  // Unbounded inbound message size so large responses are not rejected by the channel.
  return InstantiatingGrpcChannelProvider.newBuilder()
      .setMaxInboundMessageSize(Integer.MAX_VALUE);
}

/** Returns a builder for the default REST ChannelProvider for this service. */
@BetaApi
public static InstantiatingHttpJsonChannelProvider.Builder
    defaultHttpJsonTransportProviderBuilder() {
  return InstantiatingHttpJsonChannelProvider.newBuilder();
}

/** Returns the default TransportChannelProvider (gRPC). */
public static TransportChannelProvider defaultTransportChannelProvider() {
  return defaultGrpcTransportProviderBuilder().build();
}

/** Returns the API client header provider builder for the gRPC transport. */
public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
  return ApiClientHeaderProvider.newBuilder()
      .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(OrgPolicyStubSettings.class))
      .setTransportToken(
          GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
}

/** Returns the API client header provider builder for the HTTP/JSON transport. */
public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
  return ApiClientHeaderProvider.newBuilder()
      .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(OrgPolicyStubSettings.class))
      .setTransportToken(
          GaxHttpJsonProperties.getHttpJsonTokenName(),
          GaxHttpJsonProperties.getHttpJsonVersion());
}

/** Returns the default API client header provider builder (delegates to the gRPC one). */
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
  return OrgPolicyStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
}

/** Returns a new gRPC builder for this class. */
public static Builder newBuilder() {
  return Builder.createDefault();
}

/** Returns a new REST builder for this class. */
public static Builder newHttpJsonBuilder() {
  return Builder.createHttpJsonDefault();
}

/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
  return new Builder(clientContext);
}

/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
  return new Builder(this);
}
/**
 * Builds the immutable settings from the given builder: one {@code build()} per RPC
 * method, freezing the per-method call settings configured on the builder.
 */
protected OrgPolicyStubSettings(Builder settingsBuilder) throws IOException {
  super(settingsBuilder);

  listConstraintsSettings = settingsBuilder.listConstraintsSettings().build();
  listPoliciesSettings = settingsBuilder.listPoliciesSettings().build();
  getPolicySettings = settingsBuilder.getPolicySettings().build();
  getEffectivePolicySettings = settingsBuilder.getEffectivePolicySettings().build();
  createPolicySettings = settingsBuilder.createPolicySettings().build();
  updatePolicySettings = settingsBuilder.updatePolicySettings().build();
  deletePolicySettings = settingsBuilder.deletePolicySettings().build();
  createCustomConstraintSettings = settingsBuilder.createCustomConstraintSettings().build();
  updateCustomConstraintSettings = settingsBuilder.updateCustomConstraintSettings().build();
  getCustomConstraintSettings = settingsBuilder.getCustomConstraintSettings().build();
  listCustomConstraintsSettings = settingsBuilder.listCustomConstraintsSettings().build();
  deleteCustomConstraintSettings = settingsBuilder.deleteCustomConstraintSettings().build();
}
/** Builder for OrgPolicyStubSettings. */
public static class Builder extends StubSettings.Builder<OrgPolicyStubSettings, Builder> {
  // Collected so applyToAllUnaryMethods can update every method's settings in one pass.
  private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
  private final PagedCallSettings.Builder<
          ListConstraintsRequest, ListConstraintsResponse, ListConstraintsPagedResponse>
      listConstraintsSettings;
  private final PagedCallSettings.Builder<
          ListPoliciesRequest, ListPoliciesResponse, ListPoliciesPagedResponse>
      listPoliciesSettings;
  private final UnaryCallSettings.Builder<GetPolicyRequest, Policy> getPolicySettings;
  private final UnaryCallSettings.Builder<GetEffectivePolicyRequest, Policy>
      getEffectivePolicySettings;
  private final UnaryCallSettings.Builder<CreatePolicyRequest, Policy> createPolicySettings;
  private final UnaryCallSettings.Builder<UpdatePolicyRequest, Policy> updatePolicySettings;
  private final UnaryCallSettings.Builder<DeletePolicyRequest, Empty> deletePolicySettings;
  private final UnaryCallSettings.Builder<CreateCustomConstraintRequest, CustomConstraint>
      createCustomConstraintSettings;
  private final UnaryCallSettings.Builder<UpdateCustomConstraintRequest, CustomConstraint>
      updateCustomConstraintSettings;
  private final UnaryCallSettings.Builder<GetCustomConstraintRequest, CustomConstraint>
      getCustomConstraintSettings;
  private final PagedCallSettings.Builder<
          ListCustomConstraintsRequest,
          ListCustomConstraintsResponse,
          ListCustomConstraintsPagedResponse>
      listCustomConstraintsSettings;
  private final UnaryCallSettings.Builder<DeleteCustomConstraintRequest, Empty>
      deleteCustomConstraintSettings;

  // Status codes considered retryable, keyed by retry-policy name. Here policy 0
  // retries on UNAVAILABLE and DEADLINE_EXCEEDED.
  private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
      RETRYABLE_CODE_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
        ImmutableMap.builder();
    definitions.put(
        "retry_policy_0_codes",
        ImmutableSet.copyOf(
            Lists.<StatusCode.Code>newArrayList(
                StatusCode.Code.UNAVAILABLE, StatusCode.Code.DEADLINE_EXCEEDED)));
    RETRYABLE_CODE_DEFINITIONS = definitions.build();
  }

  // Retry timing parameters (backoff delays, RPC timeouts), keyed by retry-policy name.
  private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
    RetrySettings settings = null;
    // 1s initial delay, 1.3x backoff up to 10s; 60s per-attempt and total timeout.
    settings =
        RetrySettings.newBuilder()
            .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
            .setRetryDelayMultiplier(1.3)
            .setMaxRetryDelayDuration(Duration.ofMillis(10000L))
            .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setTotalTimeoutDuration(Duration.ofMillis(60000L))
            .build();
    definitions.put("retry_policy_0_params", settings);
    RETRY_PARAM_DEFINITIONS = definitions.build();
  }

  protected Builder() {
    this(((ClientContext) null));
  }

  // Fresh builder: creates a new settings builder per method and registers them all
  // in unaryMethodSettingsBuilders, then applies the generated retry defaults.
  protected Builder(ClientContext clientContext) {
    super(clientContext);

    listConstraintsSettings = PagedCallSettings.newBuilder(LIST_CONSTRAINTS_PAGE_STR_FACT);
    listPoliciesSettings = PagedCallSettings.newBuilder(LIST_POLICIES_PAGE_STR_FACT);
    getPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    getEffectivePolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    createPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    updatePolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    deletePolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    createCustomConstraintSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    updateCustomConstraintSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    getCustomConstraintSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    listCustomConstraintsSettings =
        PagedCallSettings.newBuilder(LIST_CUSTOM_CONSTRAINTS_PAGE_STR_FACT);
    deleteCustomConstraintSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            listConstraintsSettings,
            listPoliciesSettings,
            getPolicySettings,
            getEffectivePolicySettings,
            createPolicySettings,
            updatePolicySettings,
            deletePolicySettings,
            createCustomConstraintSettings,
            updateCustomConstraintSettings,
            getCustomConstraintSettings,
            listCustomConstraintsSettings,
            deleteCustomConstraintSettings);
    initDefaults(this);
  }

  // Copy builder: clones per-method builders from existing settings (no defaults re-applied).
  protected Builder(OrgPolicyStubSettings settings) {
    super(settings);

    listConstraintsSettings = settings.listConstraintsSettings.toBuilder();
    listPoliciesSettings = settings.listPoliciesSettings.toBuilder();
    getPolicySettings = settings.getPolicySettings.toBuilder();
    getEffectivePolicySettings = settings.getEffectivePolicySettings.toBuilder();
    createPolicySettings = settings.createPolicySettings.toBuilder();
    updatePolicySettings = settings.updatePolicySettings.toBuilder();
    deletePolicySettings = settings.deletePolicySettings.toBuilder();
    createCustomConstraintSettings = settings.createCustomConstraintSettings.toBuilder();
    updateCustomConstraintSettings = settings.updateCustomConstraintSettings.toBuilder();
    getCustomConstraintSettings = settings.getCustomConstraintSettings.toBuilder();
    listCustomConstraintsSettings = settings.listCustomConstraintsSettings.toBuilder();
    deleteCustomConstraintSettings = settings.deleteCustomConstraintSettings.toBuilder();

    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            listConstraintsSettings,
            listPoliciesSettings,
            getPolicySettings,
            getEffectivePolicySettings,
            createPolicySettings,
            updatePolicySettings,
            deletePolicySettings,
            createCustomConstraintSettings,
            updateCustomConstraintSettings,
            getCustomConstraintSettings,
            listCustomConstraintsSettings,
            deleteCustomConstraintSettings);
  }

  // Default builder wired for the gRPC transport.
  private static Builder createDefault() {
    Builder builder = new Builder(((ClientContext) null));

    builder.setTransportChannelProvider(defaultTransportChannelProvider());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);

    return initDefaults(builder);
  }

  // Default builder wired for the HTTP/JSON (REST) transport.
  private static Builder createHttpJsonDefault() {
    Builder builder = new Builder(((ClientContext) null));

    builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);

    return initDefaults(builder);
  }

  // Applies the generated retry codes/params ("retry_policy_0_*") to every method.
  private static Builder initDefaults(Builder builder) {
    builder
        .listConstraintsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .listPoliciesSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .getPolicySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .getEffectivePolicySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .createPolicySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .updatePolicySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .deletePolicySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .createCustomConstraintSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .updateCustomConstraintSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .getCustomConstraintSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .listCustomConstraintsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    builder
        .deleteCustomConstraintSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

    return builder;
  }

  /**
   * Applies the given settings updater function to all of the unary API methods in this service.
   *
   * <p>Note: This method does not support applying settings to streaming methods.
   */
  public Builder applyToAllUnaryMethods(
      ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
    super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
    return this;
  }

  /** Returns the list of per-method settings builders registered on this builder. */
  public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
    return unaryMethodSettingsBuilders;
  }

  /** Returns the builder for the settings used for calls to listConstraints. */
  public PagedCallSettings.Builder<
          ListConstraintsRequest, ListConstraintsResponse, ListConstraintsPagedResponse>
      listConstraintsSettings() {
    return listConstraintsSettings;
  }

  /** Returns the builder for the settings used for calls to listPolicies. */
  public PagedCallSettings.Builder<
          ListPoliciesRequest, ListPoliciesResponse, ListPoliciesPagedResponse>
      listPoliciesSettings() {
    return listPoliciesSettings;
  }

  /** Returns the builder for the settings used for calls to getPolicy. */
  public UnaryCallSettings.Builder<GetPolicyRequest, Policy> getPolicySettings() {
    return getPolicySettings;
  }

  /** Returns the builder for the settings used for calls to getEffectivePolicy. */
  public UnaryCallSettings.Builder<GetEffectivePolicyRequest, Policy>
      getEffectivePolicySettings() {
    return getEffectivePolicySettings;
  }

  /** Returns the builder for the settings used for calls to createPolicy. */
  public UnaryCallSettings.Builder<CreatePolicyRequest, Policy> createPolicySettings() {
    return createPolicySettings;
  }

  /** Returns the builder for the settings used for calls to updatePolicy. */
  public UnaryCallSettings.Builder<UpdatePolicyRequest, Policy> updatePolicySettings() {
    return updatePolicySettings;
  }

  /** Returns the builder for the settings used for calls to deletePolicy. */
  public UnaryCallSettings.Builder<DeletePolicyRequest, Empty> deletePolicySettings() {
    return deletePolicySettings;
  }

  /** Returns the builder for the settings used for calls to createCustomConstraint. */
  public UnaryCallSettings.Builder<CreateCustomConstraintRequest, CustomConstraint>
      createCustomConstraintSettings() {
    return createCustomConstraintSettings;
  }

  /** Returns the builder for the settings used for calls to updateCustomConstraint. */
  public UnaryCallSettings.Builder<UpdateCustomConstraintRequest, CustomConstraint>
      updateCustomConstraintSettings() {
    return updateCustomConstraintSettings;
  }

  /** Returns the builder for the settings used for calls to getCustomConstraint. */
  public UnaryCallSettings.Builder<GetCustomConstraintRequest, CustomConstraint>
      getCustomConstraintSettings() {
    return getCustomConstraintSettings;
  }

  /** Returns the builder for the settings used for calls to listCustomConstraints. */
  public PagedCallSettings.Builder<
          ListCustomConstraintsRequest,
          ListCustomConstraintsResponse,
          ListCustomConstraintsPagedResponse>
      listCustomConstraintsSettings() {
    return listCustomConstraintsSettings;
  }

  /** Returns the builder for the settings used for calls to deleteCustomConstraint. */
  public UnaryCallSettings.Builder<DeleteCustomConstraintRequest, Empty>
      deleteCustomConstraintSettings() {
    return deleteCustomConstraintSettings;
  }

  @Override
  public OrgPolicyStubSettings build() throws IOException {
    return new OrgPolicyStubSettings(this);
  }
}
}
|
apache/hive | 37,050 | ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveMetaStoreChecker.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.metadata;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.metastore.CheckResult;
import org.apache.hadoop.hive.metastore.HiveMetaStoreChecker;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.MetastoreException;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcInputFormat;
import org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.thrift.TException;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.Lists;
/**
 * Unit tests for {@link HiveMetaStoreChecker}, verifying that metastore/filesystem
 * consistency checks correctly report tables and partitions missing from either side.
 */
public class TestHiveMetaStoreChecker {
private Hive hive;
private IMetaStoreClient msc;
private FileSystem fs;
// Checker under test; created fresh for each test in setUp().
private HiveMetaStoreChecker checker = null;

// Fixed catalog/db/table names used by every test; dropped in setUp()/tearDown().
private final String catName = "hive";
private final String dbName = "testhivemetastorechecker_db";
private final String tableName = "testhivemetastorechecker_table";

// Two-level partition key names: (partdate, partcity).
private final String partDateName = "partdate";
private final String partCityName = "partcity";
private List<FieldSchema> partCols;
// Two partition specs (2008-01-01/london and 2008-01-02/stockholm) built in setUp().
private List<Map<String, String>> parts;
/**
 * Creates a metastore client and a {@link HiveMetaStoreChecker} against a test HiveConf,
 * starts a session with the SQLStd authorizer and a transaction manager, builds the two
 * partition specs used by the tests, and drops any leftovers from a previous run.
 */
@Before
public void setUp() throws Exception {
  hive = Hive.get();
  HiveConf conf = new HiveConfForTest(hive.getConf(), getClass());
  // 15 filesystem handler threads for the checker's parallel path listing.
  conf.set(MetastoreConf.ConfVars.FS_HANDLER_THREADS_COUNT.getVarname(), "15");
  // "throw" — assumes invalid partition paths make the check fail rather than be
  // skipped; see MetastoreConf.MSCK_PATH_VALIDATION.
  conf.set(MetastoreConf.ConfVars.MSCK_PATH_VALIDATION.getVarname(), "throw");
  msc = new HiveMetaStoreClient(conf);
  checker = new HiveMetaStoreChecker(msc, conf);
  conf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
      "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
  HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
  SessionState ss = SessionState.start(conf);
  ss.initTxnMgr(conf);

  // Partition schema: (partdate string, partcity string).
  partCols = new ArrayList<>();
  partCols.add(new FieldSchema(partDateName, serdeConstants.STRING_TYPE_NAME, ""));
  partCols.add(new FieldSchema(partCityName, serdeConstants.STRING_TYPE_NAME, ""));

  parts = new ArrayList<>();
  Map<String, String> part1 = new HashMap<>();
  part1.put(partDateName, "2008-01-01");
  part1.put(partCityName, "london");
  parts.add(part1);
  Map<String, String> part2 = new HashMap<>();
  part2.put(partDateName, "2008-01-02");
  part2.put(partCityName, "stockholm");
  parts.add(part2);

  //cleanup just in case something is left over from previous run
  dropDbTable();
}
/**
 * Best-effort cleanup: drops the test table, then the test database, ignoring any
 * metastore errors (e.g. the objects not existing). If dropping the table fails,
 * the database drop is intentionally skipped too (single try block).
 */
private void dropDbTable() {
  // cleanup
  try {
    // boolean flags request data deletion and tolerate missing objects — see
    // IMetaStoreClient.dropTable/dropDatabase for the exact parameter meanings.
    msc.dropTable(catName, dbName, tableName, true, true);
    msc.dropDatabase(catName, dbName, true, true, true);
  } catch (TException e) {
    // ignore — cleanup must not fail the test
  }
}
/** Drops the test db/table and closes the thread-local Hive instance after each test. */
@After
public void tearDown() throws Exception {
  dropDbTable();
  Hive.closeCurrent();
}
/**
 * Table-level checks of {@link HiveMetaStoreChecker#checkMetastore}: a clean metastore
 * reports nothing; a table name unknown to the metastore shows up in tablesNotInMs;
 * deleting a managed table's directory shows up in tablesNotOnFs; an extra directory
 * next to the table shows up in tablesNotInMs; and an external table reports clean.
 */
@Test
public void testTableCheck() throws HiveException, IOException, TException, MetastoreException, MetaException {
  CheckResult result = checker.checkMetastore(catName, dbName, null, null, null);
  // we haven't added anything so should return an all ok
  assertCheckerFoundNothing(result);

  // check table only, should not exist in ms
  result = checker.checkMetastore(catName, dbName, tableName, null, null);
  assertEquals(1, result.getTablesNotInMs().size());
  assertEquals(tableName, result.getTablesNotInMs().iterator().next());
  assertEquals(Collections.<String>emptySet(), result.getTablesNotOnFs());
  assertNoPartitionDiscrepancies(result);

  Database db = new Database();
  db.setCatalogName(catName);
  db.setName(dbName);
  msc.createDatabase(db);

  Table table = new Table(dbName, tableName);
  table.setDbName(dbName);
  table.setInputFormatClass(TextInputFormat.class);
  table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);

  hive.createTable(table);
  // createTable must assign a table id; unset it so the object can be reused below.
  Assert.assertTrue(table.getTTable().isSetId());
  table.getTTable().unsetId();
  // now we've got a table, check that it works
  // first check all (1) tables
  result = checker.checkMetastore(catName, dbName, null, null, null);
  assertCheckerFoundNothing(result);

  // then let's check the one we know about
  result = checker.checkMetastore(catName, dbName, tableName, null, null);
  assertCheckerFoundNothing(result);

  // remove the table folder
  fs = table.getPath().getFileSystem(hive.getConf());
  fs.delete(table.getPath(), true);

  // now this shouldn't find the path on the fs
  result = checker.checkMetastore(catName, dbName, tableName, null, null);
  assertEquals(Collections.<String>emptySet(), result.getTablesNotInMs());
  assertEquals(1, result.getTablesNotOnFs().size());
  assertEquals(tableName, result.getTablesNotOnFs().iterator().next());
  assertNoPartitionDiscrepancies(result);

  // put it back and one additional table
  fs.mkdirs(table.getPath());
  Path fakeTable = table.getPath().getParent().suffix(
      Path.SEPARATOR + "faketable");
  fs.mkdirs(fakeTable);
  fs.deleteOnExit(fakeTable);

  // find the extra table
  result = checker.checkMetastore(catName, dbName, null, null, null);
  assertEquals(1, result.getTablesNotInMs().size());
  assertEquals(fakeTable.getName(), Lists.newArrayList(result.getTablesNotInMs()).get(0));
  assertEquals(Collections.<String>emptySet(), result.getTablesNotOnFs());
  assertNoPartitionDiscrepancies(result);

  // create a new external table
  hive.dropTable(dbName, tableName);
  table.setProperty("EXTERNAL", "TRUE");
  hive.createTable(table);

  // should return all ok
  result = checker.checkMetastore(catName, dbName, null, null, null);
  assertCheckerFoundNothing(result);
}

/** Asserts the checker reported no table or partition discrepancies at all. */
private void assertCheckerFoundNothing(CheckResult result) {
  assertEquals(Collections.<String>emptySet(), result.getTablesNotInMs());
  assertEquals(Collections.<String>emptySet(), result.getTablesNotOnFs());
  assertNoPartitionDiscrepancies(result);
}

/** Asserts the checker reported no partition discrepancies (either direction). */
private void assertNoPartitionDiscrepancies(CheckResult result) {
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(), result.getPartitionsNotOnFs());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(), result.getPartitionsNotInMs());
}
/*
 * Verifies that an extra directory level below the defined partition keys
 * (tblPath/p1=a/p2=b/extraCol=val for a table partitioned on p1, p2) does not
 * make the checker throw; the valid prefix is still reported as a partition
 * missing from the metastore.
 */
@Test
public void testAdditionalPartitionDirs()
    throws HiveException, AlreadyExistsException, IOException, MetastoreException {
  Table testTable = createTestTable(false);
  List<Partition> existingParts = hive.getPartitions(testTable);
  assertEquals(2, existingParts.size());
  // Create a bogus extra sub-directory underneath a valid partition path.
  fs = existingParts.get(0).getDataLocation().getFileSystem(hive.getConf());
  String bogusDir = partDateName + "=2017-01-01/" + partCityName
      + "=paloalto/fakePartCol=fakepartValue";
  addFolderToPath(fs, testTable.getDataLocation().toString(), bogusDir);
  CheckResult checkResult = checker.checkMetastore(catName, dbName, tableName, null, null);
  assertEquals(Collections.<String>emptySet(), checkResult.getTablesNotInMs());
  assertEquals(Collections.<String>emptySet(), checkResult.getTablesNotOnFs());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(),
      checkResult.getPartitionsNotOnFs());
  // The path up to the defined partition keys is valid, so it is surfaced as
  // a partition that exists on the FS but not in the metastore.
  assertEquals(1, checkResult.getPartitionsNotInMs().size());
}
// A directory whose key names do not match the table's partition keys must
// make the checker fail with MetastoreException (default validation mode).
@Test(expected = MetastoreException.class)
public void testInvalidPartitionKeyName()
    throws HiveException, AlreadyExistsException, IOException, MetastoreException {
  Table testTable = createTestTable(false);
  List<Partition> existingParts = hive.getPartitions(testTable);
  assertEquals(2, existingParts.size());
  // Plant a directory on the FS with wrong partition-key names.
  fs = existingParts.get(0).getDataLocation().getFileSystem(hive.getConf());
  addFolderToPath(fs, testTable.getDataLocation().toString(),
      "fakedate=2009-01-01/fakecity=sanjose");
  checker.checkMetastore(catName, dbName, tableName, null, null);
}
/*
 * In "skip" validation mode the checker must not throw when an invalid
 * partition directory is found; the bad directory is simply ignored.
 */
@Test
public void testSkipInvalidPartitionKeyName()
throws HiveException, AlreadyExistsException, IOException, MetastoreException {
// Switch msck path validation to "skip" and rebuild the checker so it picks
// up the changed configuration.
hive.getConf().set(MetastoreConf.ConfVars.MSCK_PATH_VALIDATION.getVarname(), "skip");
checker = new HiveMetaStoreChecker(msc, hive.getConf());
Table table = createTestTable(false);
List<Partition> partitions = hive.getPartitions(table);
assertEquals(2, partitions.size());
// add a fake partition dir on fs whose key names do not match the table
fs = partitions.get(0).getDataLocation().getFileSystem(hive.getConf());
addFolderToPath(fs, table.getDataLocation().toString(),"fakedate=2009-01-01/fakecity=sanjose");
// plus 2 directories with valid partition key names
createPartitionsDirectoriesOnFS(table, 2);
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<String> emptySet(), result.getTablesNotInMs());
assertEquals(Collections.<String> emptySet(), result.getTablesNotOnFs());
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
// only the 2 valid partitions should be reported; the fake dir is skipped
assertEquals(2, result.getPartitionsNotInMs().size());
}
/*
 * Tests that ordinary (uncompacted) delta_dirs in an unregistered partition
 * folder do not throw HiveException, and that the highest writeId is parsed
 * from the delta directory names.
 */
@Test
public void testAddPartitionNormalDeltas() throws Exception {
Table table = createTestTable(true);
List<Partition> partitions = hive.getPartitions(table);
assertEquals(2, partitions.size());
// add a partition dir on fs that the metastore does not know about
fs = partitions.get(0).getDataLocation().getFileSystem(hive.getConf());
Path newPart = addFolderToPath(fs, table.getDataLocation().toString(),
partDateName + "=2017-01-01/" + partCityName + "=paloalto");
// Add a few deltas named delta_<minWriteId>_<maxWriteId>_<stmtId>
addFolderToPath(fs, newPart.toString(), "delta_0000001_0000001_0000");
addFolderToPath(fs, newPart.toString(), "delta_0000010_0000010_0000");
addFolderToPath(fs, newPart.toString(), "delta_0000101_0000101_0000");
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
assertEquals(1, result.getPartitionsNotInMs().size());
// Found the highest writeId (101); uncompacted deltas carry no visibility
// txn id suffix, so maxTxnId stays 0
assertEquals(101, result.getPartitionsNotInMs().iterator().next().getMaxWriteId());
assertEquals(0, result.getPartitionsNotInMs().iterator().next().getMaxTxnId());
}
/*
 * Tests that compacted delta dirs (delta_<min>_<max>_v<visibilityTxnId>) in
 * an unregistered partition folder do not throw HiveException, and that both
 * the highest writeId and the highest visibility txnId are parsed from the
 * directory names. (Header previously said "normal delta_dirs" — stale.)
 */
@Test
public void testAddPartitionCompactedDeltas() throws Exception {
Table table = createTestTable(true);
List<Partition> partitions = hive.getPartitions(table);
assertEquals(2, partitions.size());
// add a partition dir on fs that the metastore does not know about
fs = partitions.get(0).getDataLocation().getFileSystem(hive.getConf());
Path newPart = addFolderToPath(fs, table.getDataLocation().toString(),
partDateName + "=2017-01-01/" + partCityName + "=paloalto");
// Add a few deltas, two of them compacted (carrying a v<txnId> suffix)
addFolderToPath(fs, newPart.toString(), "delta_0000001_0000001_0000");
addFolderToPath(fs, newPart.toString(), "delta_0000010_0000015_v0000067");
addFolderToPath(fs, newPart.toString(), "delta_0000101_0000120_v0000087");
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
assertEquals(1, result.getPartitionsNotInMs().size());
// Found the highest writeId (120) and highest visibility txnId (87)
assertEquals(120, result.getPartitionsNotInMs().iterator().next().getMaxWriteId());
assertEquals(87, result.getPartitionsNotInMs().iterator().next().getMaxTxnId());
}
// Verifies that a compacted base dir (base_<writeId>_v<txnId>) alongside
// deltas yields the base's writeId and visibility txnId for the new partition.
@Test
public void testAddPartitionCompactedBase() throws Exception {
Table table = createTestTable(true);
List<Partition> partitions = hive.getPartitions(table);
assertEquals(2, partitions.size());
// add a partition dir on fs that the metastore does not know about
fs = partitions.get(0).getDataLocation().getFileSystem(hive.getConf());
Path newPart = addFolderToPath(fs, table.getDataLocation().toString(),
partDateName + "=2017-01-01/" + partCityName + "=paloalto");
// Add a few deltas plus a compacted base covering writeIds up to 3
addFolderToPath(fs, newPart.toString(), "delta_0000001_0000001_0000");
addFolderToPath(fs, newPart.toString(), "delta_0000002_0000002_0000");
addFolderToPath(fs, newPart.toString(), "delta_0000003_0000003_0000");
addFolderToPath(fs, newPart.toString(), "base_0000003_v0000200");
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
assertEquals(1, result.getPartitionsNotInMs().size());
// Found the highest writeId (3) and the base's visibility txnId (200)
assertEquals(3, result.getPartitionsNotInMs().iterator().next().getMaxWriteId());
assertEquals(200, result.getPartitionsNotInMs().iterator().next().getMaxTxnId());
}
// Verifies that a base dir without a visibility suffix (base_<writeId>, as
// written for insert-only/MM tables) yields its writeId and a txnId of 0.
@Test
public void testAddPartitionMMBase() throws Exception {
Table table = createTestTable(true);
List<Partition> partitions = hive.getPartitions(table);
assertEquals(2, partitions.size());
// add a partition dir on fs that the metastore does not know about
fs = partitions.get(0).getDataLocation().getFileSystem(hive.getConf());
Path newPart = addFolderToPath(fs, table.getDataLocation().toString(),
partDateName + "=2017-01-01/" + partCityName + "=paloalto");
// Add a few deltas plus a base with no v<txnId> suffix
addFolderToPath(fs, newPart.toString(), "delta_0000001_0000001_0000");
addFolderToPath(fs, newPart.toString(), "delta_0000002_0000002_0000");
addFolderToPath(fs, newPart.toString(), "delta_0000003_0000003_0000");
addFolderToPath(fs, newPart.toString(), "base_0000004");
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
assertEquals(1, result.getPartitionsNotInMs().size());
// Found the highest writeId (4); no visibility suffix, so maxTxnId is 0
assertEquals(4, result.getPartitionsNotInMs().iterator().next().getMaxWriteId());
assertEquals(0, result.getPartitionsNotInMs().iterator().next().getMaxTxnId());
}
// For a non-partitioned transactional table the max writeId/txnId are parsed
// from the delta/base dirs directly under the table location and reported on
// the CheckResult itself. (Method name typo "NoN" kept — it is public API of
// the test class and renaming is out of scope here.)
@Test
public void testNoNPartitionedTable() throws Exception {
Table table = createNonPartitionedTable();
// write delta/base dirs directly under the table location on the fs
fs = table.getDataLocation().getFileSystem(hive.getConf());
Path tablePath = table.getDataLocation();
// Add a few deltas plus a compacted base
addFolderToPath(fs, tablePath.toString(), "delta_0000001_0000001_0000");
addFolderToPath(fs, tablePath.toString(), "delta_0000002_0000002_0000");
addFolderToPath(fs, tablePath.toString(), "delta_0000003_0000003_0000");
addFolderToPath(fs, tablePath.toString(), "base_0000003_v0000200");
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotInMs());
// Found the highest writeId (3) and the base's visibility txnId (200)
assertEquals(3, result.getMaxWriteId());
assertEquals(200, result.getMaxTxnId());
}
/**
 * End-to-end partition consistency check: a healthy table reports nothing;
 * deleting one partition directory on the FS surfaces it via
 * getPartitionsNotOnFs(); recreating the table restores a clean result.
 *
 * Fix: removed a leftover debug System.err.println and a stray "//--0e"
 * editor marker that had been committed at the end of the method.
 */
@Test
public void testPartitionsCheck() throws HiveException,
    IOException, TException, MetastoreException {
  Table table = createTestTable(false);
  CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
  // all is well
  assertEquals(Collections.<String>emptySet(), result.getTablesNotInMs());
  assertEquals(Collections.<String>emptySet(), result.getTablesNotOnFs());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(), result.getPartitionsNotOnFs());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(), result.getPartitionsNotInMs());
  List<Partition> partitions = hive.getPartitions(table);
  assertEquals(2, partitions.size());
  Partition partToRemove = partitions.get(0);
  // As this partition (partdate=2008-01-01/partcity=london) is the only
  // partition under (partdate=2008-01-01) we also need to delete
  // partdate=2008-01-01 to keep the directory tree consistent.
  Path partToRemovePath = partToRemove.getDataLocation().getParent();
  fs = partToRemovePath.getFileSystem(hive.getConf());
  fs.delete(partToRemovePath, true);
  result = checker.checkMetastore(catName, dbName, tableName, null, null);
  // missing one partition on fs
  assertEquals(Collections.<String>emptySet(), result.getTablesNotInMs());
  assertEquals(Collections.<String>emptySet(), result.getTablesNotOnFs());
  assertEquals(1, result.getPartitionsNotOnFs().size());
  assertEquals(partToRemove.getName(), result.getPartitionsNotOnFs().iterator().next()
      .getPartitionName());
  assertEquals(partToRemove.getTable().getTableName(),
      result.getPartitionsNotOnFs().iterator().next().getTableName());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(), result.getPartitionsNotInMs());
  // old test is moved to msck_repair_2.q
  // cleanup: recreate the table and verify the db-wide check is clean again
  hive.dropTable(dbName, tableName, true, true);
  hive.createTable(table);
  result = checker.checkMetastore(catName, dbName, null, null, null);
  assertEquals(Collections.<String>emptySet(), result.getTablesNotInMs());
  assertEquals(Collections.<String>emptySet(), result.getTablesNotOnFs());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(), result.getPartitionsNotOnFs());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(), result.getPartitionsNotInMs());
}
// Verifies that dropping a managed table / database also removes stray
// directories that live under their locations but are unknown to the
// metastore (a fake sibling table dir and a fake partition dir).
@Test
public void testDataDeletion() throws HiveException,
IOException, TException {
Database db = new Database();
db.setName(dbName);
hive.createDatabase(db);
Table table = new Table(dbName, tableName);
table.setDbName(dbName);
table.setInputFormatClass(TextInputFormat.class);
table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
table.setPartCols(partCols);
hive.createTable(table);
table = hive.getTable(dbName, tableName);
// a directory next to the table dir, inside the database location
Path fakeTable = table.getPath().getParent().suffix(
Path.SEPARATOR + "faketable");
fs = fakeTable.getFileSystem(hive.getConf());
fs.mkdirs(fakeTable);
fs.deleteOnExit(fakeTable);
// a directory inside the table location that is not a registered partition
Path fakePart = new Path(table.getDataLocation().toString(),
"fakepartition=fakevalue");
fs.mkdirs(fakePart);
fs.deleteOnExit(fakePart);
hive.dropTable(dbName, tableName, true, true);
// dropping the table must purge everything under its location
assertFalse(fs.exists(fakePart));
hive.dropDatabase(dbName);
// dropping the database must purge everything under its location
assertFalse(fs.exists(fakeTable));
}
/**
 * The multi-threaded checker discovers partition directories that exist only
 * on the filesystem and reports them as missing from the metastore.
 * @throws Exception ex
 */
@Test
public void testPartitionsNotInMs() throws Exception {
  Table partitionedTable = createPartitionedTestTable(dbName, tableName, 2, 0);
  // Materialise ten partition directories on the FS only.
  createPartitionsDirectoriesOnFS(partitionedTable, 10);
  CheckResult checkResult = checker.checkMetastore(catName, dbName, tableName, null, null);
  assertEquals(Collections.<String>emptySet(), checkResult.getTablesNotInMs());
  assertEquals(Collections.<String>emptySet(), checkResult.getTablesNotOnFs());
  assertEquals(Collections.<CheckResult.PartitionResult>emptySet(),
      checkResult.getPartitionsNotOnFs());
  assertEquals(10, checkResult.getPartitionsNotInMs().size());
}
/**
 * Tests the single-threaded implementation of checkMetastore; expects the
 * same result as the multi-threaded variant.
 * @throws Exception ex
 */
@Test
public void testSingleThreadedCheckMetastore() throws Exception {
// set num of threads to 0 so that single-threaded checkMetastore is called
hive.getConf().set(MetastoreConf.ConfVars.FS_HANDLER_THREADS_COUNT.getVarname(), "0");
Table testTable = createPartitionedTestTable(dbName, tableName, 2, 0);
// add 10 partitions on the filesystem (none registered in the metastore)
createPartitionsDirectoriesOnFS(testTable, 10);
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<String> emptySet(), result.getTablesNotInMs());
assertEquals(Collections.<String> emptySet(), result.getTablesNotOnFs());
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
// all 10 FS-only partitions should be reported as missing from the metastore
assertEquals(10, result.getPartitionsNotInMs().size());
}
/**
 * Tests the single-threaded implementation for deeply nested partitioned
 * tables (more partition keys than the would-be thread pool size).
 *
 * @throws Exception ex
 */
@Test
public void testSingleThreadedDeeplyNestedTables() throws Exception {
// set num of threads to 0 so that single-threaded checkMetastore is called
hive.getConf().set(MetastoreConf.ConfVars.FS_HANDLER_THREADS_COUNT.getVarname(), "0");
int poolSize = 2;
// create a deeply nested table which has more partition keys than the pool size
Table testTable = createPartitionedTestTable(dbName, tableName, poolSize + 2, 0);
// add 10 partitions on the filesystem (none registered in the metastore)
createPartitionsDirectoriesOnFS(testTable, 10);
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<String> emptySet(), result.getTablesNotInMs());
assertEquals(Collections.<String> emptySet(), result.getTablesNotOnFs());
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
// all 10 FS-only partitions should be reported as missing from the metastore
assertEquals(10, result.getPartitionsNotInMs().size());
}
/**
 * Tests the case when the number of partition keys is larger than the
 * configured threadpool size (multi-threaded path).
 *
 * @throws Exception ex
 */
@Test
public void testDeeplyNestedPartitionedTables() throws Exception {
// pool of 2 threads, but the table below has poolSize + 2 partition keys
hive.getConf().set(MetastoreConf.ConfVars.FS_HANDLER_THREADS_COUNT.getVarname(), "2");
int poolSize = 2;
// create a deeply nested table which has more partition keys than the pool size
Table testTable = createPartitionedTestTable(dbName, tableName, poolSize + 2, 0);
// add 10 partitions on the filesystem (none registered in the metastore)
createPartitionsDirectoriesOnFS(testTable, 10);
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<String> emptySet(), result.getTablesNotInMs());
assertEquals(Collections.<String> emptySet(), result.getTablesNotOnFs());
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
// all 10 FS-only partitions should be reported as missing from the metastore
assertEquals(10, result.getPartitionsNotInMs().size());
}
/**
 * Test that the checker throws MetastoreException when there is a dummy
 * directory present in the nested level of sub-directories — both when the
 * bogus partition dir is empty and when it contains a file.
 * @throws Exception ex
 */
@Test
public void testErrorForMissingPartitionColumn() throws Exception {
Table testTable = createPartitionedTestTable(dbName, tableName, 2, 0);
// add 10 partitions on the filesystem
createPartitionsDirectoriesOnFS(testTable, 10);
//create a fake directory (wrong partition key name) to trigger the exception
StringBuilder sb = new StringBuilder(testTable.getDataLocation().toString());
sb.append(Path.SEPARATOR);
sb.append("dummyPart=error");
createDirectory(sb.toString());
//check result now: empty bogus dir must fail
Exception exception = null;
try {
checker.checkMetastore(catName, dbName, tableName, null, null);
} catch (Exception e) {
exception = e;
}
assertTrue("Expected MetastoreException", exception instanceof MetastoreException);
// same check with a file inside the bogus dir — must still fail
createFile(sb.toString(), "dummyFile");
exception = null;
try {
checker.checkMetastore(catName, dbName, tableName, null, null);
} catch (Exception e) {
exception = e;
}
assertTrue("Expected MetastoreException", exception instanceof MetastoreException);
}
/**
 * Tests that the checker fails when there exists an unknown partition
 * directory on the FS with an invalid order of partition keys relative to
 * the table specification (e.g. part2=.../part1=...).
 *
 * @throws Exception ex
 */
@Test(expected = MetastoreException.class)
public void testInvalidOrderForPartitionKeysOnFS() throws Exception {
Table testTable = createPartitionedTestTable(dbName, tableName, 2, 0);
// add 10 partition dirs on the filesystem with keys in reverse order
createInvalidPartitionDirsOnFS(testTable, 10);
checker.checkMetastore(catName, dbName, tableName, null, null);
}
/**
 * In skip mode msck should ignore invalid partitions instead of throwing an
 * exception; only validly ordered partition dirs are reported.
 * @throws Exception ex
 */
@Test
public void testSkipInvalidOrderForPartitionKeysOnFS() throws Exception{
hive.getConf().set(MetastoreConf.ConfVars.MSCK_PATH_VALIDATION.getVarname(), "skip");
// rebuild the checker so it picks up the "skip" setting
checker = new HiveMetaStoreChecker(msc, hive.getConf());
Table testTable = createPartitionedTestTable(dbName, tableName, 2, 0);
// add 2 reverse-ordered (invalid) partition dirs on the filesystem
createInvalidPartitionDirsOnFS(testTable, 2);
// add 2 correctly ordered (valid) partition dirs on the filesystem
createPartitionsDirectoriesOnFS(testTable, 2);
CheckResult result = checker.checkMetastore(catName, dbName, tableName, null, null);
assertEquals(Collections.<String> emptySet(), result.getTablesNotInMs());
assertEquals(Collections.<String> emptySet(), result.getTablesNotOnFs());
assertEquals(Collections.<CheckResult.PartitionResult> emptySet(), result.getPartitionsNotOnFs());
// only 2 valid partitions should be added
assertEquals(2, result.getPartitionsNotInMs().size());
}
/**
 * Test that the single-threaded checker implementation also throws
 * MetastoreException when there is a dummy directory present in the nested
 * level — both when the bogus dir is empty and when it contains a file.
 * @throws Exception ex
 */
@Test
public void testErrorForMissingPartitionsSingleThreaded() throws Exception {
// set num of threads to 0 so that single-threaded checkMetastore is called
hive.getConf().set(MetastoreConf.ConfVars.FS_HANDLER_THREADS_COUNT.getVarname(), "0");
Table testTable = createPartitionedTestTable(dbName, tableName, 2, 0);
// add 10 partitions on the filesystem
createPartitionsDirectoriesOnFS(testTable, 10);
// create a fake directory (wrong partition key name) to trigger the exception
StringBuilder sb = new StringBuilder(testTable.getDataLocation().toString());
sb.append(Path.SEPARATOR);
sb.append("dummyPart=error");
createDirectory(sb.toString());
// check result now: empty bogus dir must fail
Exception exception = null;
try {
checker.checkMetastore(catName, dbName, tableName, null, null);
} catch (Exception e) {
exception = e;
}
assertTrue("Expected MetastoreException", exception instanceof MetastoreException);
// same check with a file inside the bogus dir — must still fail
createFile(sb.toString(), "dummyFile");
exception = null;
try {
checker.checkMetastore(catName, dbName, tableName, null, null);
} catch (Exception e) {
exception = e;
}
assertTrue("Expected MetastoreException", exception instanceof MetastoreException);
}
/**
 * Creates a test partitioned table with the required level of nested partitions and number of
 * partitions
 *
 * @param dbName - Database name
 * @param tableName - Table name
 * @param numOfPartKeys - Number of partition keys (nested levels of sub-directories in base table
 * path)
 * @param valuesPerPartition - If greater than 0 creates valuesPerPartition dummy partitions
 * @return The new table
 * @throws Exception ex
 */
private Table createPartitionedTestTable(String dbName, String tableName, int numOfPartKeys, int valuesPerPartition)
throws Exception {
Database db = new Database();
db.setName(dbName);
hive.createDatabase(db, true);
Table table = new Table(dbName, tableName);
table.setDbName(dbName);
table.setInputFormatClass(TextInputFormat.class);
table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
// create partition key schema: part1..partN, all strings
ArrayList<FieldSchema> partKeys = new ArrayList<>();
for (int i = 1; i <= numOfPartKeys; i++) {
String partName = "part" + i;
partKeys.add(new FieldSchema(partName, serdeConstants.STRING_TYPE_NAME, ""));
}
table.setPartCols(partKeys);
// create table and re-fetch it so location etc. are populated
hive.createTable(table, true);
table = hive.getTable(dbName, tableName);
if (valuesPerPartition == 0) {
return table;
}
// create partition specs
// NOTE(review): each partMap uses a single key, so the inner loop overwrites
// the same entry and each spec ends up with one value; the count assertion
// below (numOfPartKeys * valuesPerPartition) looks suspect for
// valuesPerPartition > 1. All callers in this file pass 0, so the branch is
// currently unexercised — confirm before relying on it.
ArrayList<Map<String, String>> partitionSpecs = new ArrayList<>();
for (int partKeyIndex = 0; partKeyIndex < numOfPartKeys; partKeyIndex++) {
String partName = partKeys.get(partKeyIndex).getName();
Map<String, String> partMap = new HashMap<>();
for (int val = 1; val <= valuesPerPartition; val++) {
partMap.put(partName, String.valueOf(val));
}
partitionSpecs.add(partMap);
}
// create partitions
for (Map<String, String> partSpec : partitionSpecs) {
hive.createPartition(table, partSpec);
}
List<Partition> partitions = hive.getPartitions(table);
assertEquals(numOfPartKeys * valuesPerPartition, partitions.size());
return table;
}
/**
 * Creates partition sub-directories for a given table on the file system. Used to test the
 * use-cases when partitions for the table are not present in the metastore db
 *
 * Refactor: the forward and reverse branches duplicated the same
 * path-building loop; they are now a single loop plus Collections.reverse,
 * joined with the path separator. Output paths are byte-identical.
 *
 * @param table - Table which provides the base locations and partition specs for creating the
 * sub-directories
 * @param numPartitions - Number of partitions to be created
 * @param reverseOrder - If set to true creates the partition sub-directories in the reverse order
 * of specified by partition keys defined for the table
 * @throws IOException ex
 */
private void createPartitionsDirectoriesOnFS(Table table, int numPartitions, boolean reverseOrder) throws IOException {
  String basePath = table.getDataLocation().toString();
  fs = table.getPath().getFileSystem(hive.getConf());
  List<FieldSchema> partKeys = table.getPartitionKeys();
  for (int i = 0; i < numPartitions; i++) {
    // build key=val_i components in table-key order, then reverse if asked
    List<String> components = new ArrayList<>();
    for (FieldSchema field : partKeys) {
      components.add(field.getName() + "=val_" + i);
    }
    if (reverseOrder) {
      Collections.reverse(components);
    }
    createDirectory(basePath + Path.SEPARATOR + String.join(Path.SEPARATOR, components));
  }
}
// Convenience overload: create partition dirs in the normal (declared) key order.
private void createPartitionsDirectoriesOnFS(Table table, int numPartitions) throws IOException {
createPartitionsDirectoriesOnFS(table, numPartitions, false);
}
/**
 * Creates a partition directory structure on file system but with a reverse order
 * of sub-directories compared to the partition keys defined in the table. Eg. if the
 * partition keys defined in table are (a int, b int, c int) this method will create
 * an invalid directory c=val_1/b=val_1/a=val_1
 * @param table table
 * @param numPartitions Number of partitions to create
 * @throws IOException ex
 */
private void createInvalidPartitionDirsOnFS(Table table, int numPartitions) throws IOException {
createPartitionsDirectoriesOnFS(table, numPartitions, true);
}
// Ensures the given directory exists, creates an empty file with the given
// name inside it, and schedules the directory for deletion on JVM exit.
private void createFile(String partPath, String filename) throws IOException {
  Path dir = new Path(partPath);
  fs.mkdirs(dir);
  fs.createNewFile(new Path(partPath + Path.SEPARATOR + filename));
  fs.deleteOnExit(dir);
}
// Creates a partition-like directory holding two dummy data files so the path
// looks like a real, non-empty partition; cleaned up on JVM exit.
private void createDirectory(String partPath) throws IOException {
  Path dir = new Path(partPath);
  fs.mkdirs(dir);
  for (String dummy : new String[] {"dummydata1", "dummydata2"}) {
    fs.createNewFile(new Path(partPath + Path.SEPARATOR + dummy));
  }
  fs.deleteOnExit(dir);
}
// Creates the sub-folder under rootPath, marks it for deletion on JVM exit,
// and returns its Path. (Local variable renamed from the typo "folderParth".)
private Path addFolderToPath(FileSystem fs, String rootPath, String folder) throws IOException {
  Path folderPath = new Path(rootPath, folder);
  fs.mkdirs(folderPath);
  fs.deleteOnExit(folderPath);
  return folderPath;
}
// Creates the standard two-partition test table (ORC + transactional=true
// when requested, plain text otherwise) and registers the partitions defined
// in the class-level "parts" specs.
private Table createTestTable(boolean transactional) throws HiveException, AlreadyExistsException {
Database db = new Database();
db.setName(dbName);
hive.createDatabase(db, true);
Table table = new Table(dbName, tableName);
table.setDbName(dbName);
if (transactional) {
table.setInputFormatClass(OrcInputFormat.class);
table.setOutputFormatClass(OrcOutputFormat.class);
} else {
table.setInputFormatClass(TextInputFormat.class);
table.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
}
table.setPartCols(partCols);
if (transactional) {
table.setProperty("transactional", "true");
}
hive.createTable(table);
// re-fetch so the stored location etc. are populated
table = hive.getTable(dbName, tableName);
Assert.assertTrue(table.getTTable().isSetId());
// clear the id so the Table object can be re-submitted to createTable later
table.getTTable().unsetId();
for (Map<String, String> partSpec : parts) {
hive.createPartition(table, partSpec);
}
return table;
}
// Creates a non-partitioned transactional (ORC) test table and returns the
// re-fetched Table with its id cleared so it can be re-submitted later.
private Table createNonPartitionedTable() throws Exception {
Database db = new Database();
db.setName(dbName);
hive.createDatabase(db, true);
Table table = new Table(dbName, tableName);
table.setDbName(dbName);
table.setInputFormatClass(OrcInputFormat.class);
table.setOutputFormatClass(OrcOutputFormat.class);
table.setProperty("transactional", "true");
hive.createTable(table);
// re-fetch so the stored location etc. are populated
table = hive.getTable(dbName, tableName);
Assert.assertTrue(table.getTTable().isSetId());
// clear the id so the Table object can be re-submitted to createTable later
table.getTTable().unsetId();
return table;
}
}
|
apache/seatunnel | 37,033 | seatunnel-e2e/seatunnel-connector-v2-e2e/connector-paimon-e2e/src/test/java/org/apache/seatunnel/e2e/connector/paimon/PaimonSinkCDCIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seatunnel.e2e.connector.paimon;
import org.apache.seatunnel.shade.org.apache.commons.lang3.StringUtils;
import org.apache.seatunnel.common.utils.SeaTunnelException;
import org.apache.seatunnel.e2e.common.TestResource;
import org.apache.seatunnel.e2e.common.container.EngineType;
import org.apache.seatunnel.e2e.common.container.TestContainer;
import org.apache.seatunnel.e2e.common.junit.DisabledOnContainer;
import org.apache.seatunnel.e2e.common.util.JobIdGenerator;
import org.apache.paimon.CoreOptions;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.reader.RecordReader;
import org.apache.paimon.table.FileStoreTable;
import org.apache.paimon.table.Table;
import org.apache.paimon.table.source.ReadBuilder;
import org.apache.paimon.table.source.TableRead;
import org.apache.paimon.table.source.TableScan;
import org.apache.paimon.types.DataField;
import org.apache.paimon.types.DateType;
import org.apache.paimon.types.TimestampType;
import org.apache.paimon.utils.DateTimeUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestTemplate;
import org.testcontainers.containers.Container;
import lombok.extern.slf4j.Slf4j;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.awaitility.Awaitility.given;
@DisabledOnContainer(
value = {},
type = {EngineType.SPARK, EngineType.FLINK},
disabledReason =
"Spark and Flink engine can not auto create paimon table on worker node in local file(e.g flink tm) by savemode feature which can lead error")
@Slf4j
public class PaimonSinkCDCIT extends AbstractPaimonIT implements TestResource {
@BeforeEach
@Override
public void startUp() throws Exception {
    // Detect the host OS once per test. System.getProperty is the idiomatic,
    // direct way to read a single system property (no need to materialize the
    // whole Properties map via System.getProperties()).
    // NOTE(review): toUpperCase() without a Locale is locale-sensitive
    // (e.g. Turkish dotted I) — confirm test hosts never run such locales.
    this.isWindows = System.getProperty("os.name").toUpperCase().contains("WINDOWS");
}
@AfterEach
@Override
// Intentionally empty: this IT holds no per-test resources to release.
public void tearDown() throws Exception {}
// Runs two batch jobs against the same Paimon table (case9 then case10) and
// verifies the second job's CDC updates are visible: pk 1 keeps "A", pks 2
// and 3 end up as "CCC". Awaitility polls because the Paimon snapshot may
// become readable slightly after the job exits.
@TestTemplate
public void testSinkWithMultipleInBatchMode(TestContainer container) throws Exception {
Container.ExecResult execOneResult =
container.executeJob("/fake_cdc_sink_paimon_case9.conf");
Assertions.assertEquals(0, execOneResult.getExitCode());
Container.ExecResult execTwoResult =
container.executeJob("/fake_cdc_sink_paimon_case10.conf");
Assertions.assertEquals(0, execTwoResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
List<PaimonRecord> paimonRecords =
loadPaimonData("seatunnel_namespace9", TARGET_TABLE);
Assertions.assertEquals(3, paimonRecords.size());
paimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals("A", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 2
|| paimonRecord.getPkId() == 3) {
Assertions.assertEquals("CCC", paimonRecord.getName());
}
});
});
}
// Basic CDC sink test: after the fake CDC job runs, the Paimon table holds
// the post-update/post-delete state (pk 1 updated to "A_1", pk 3 inserted as
// "C", only 2 rows remain). Awaitility polls until the snapshot is readable.
@TestTemplate
public void testFakeCDCSinkPaimon(TestContainer container) throws Exception {
Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case1.conf");
Assertions.assertEquals(0, execResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
List<PaimonRecord> paimonRecords =
loadPaimonData("seatunnel_namespace1", TARGET_TABLE);
Assertions.assertEquals(2, paimonRecords.size());
paimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals("A_1", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 3) {
Assertions.assertEquals("C", paimonRecord.getName());
}
});
});
}
// A second job whose source schema ('name INT') conflicts with the existing
// sink table schema ('name STRING') must fail (exit code 1) with the exact
// schema-mismatch message asserted below.
@TestTemplate
public void testSinkWithIncompatibleSchema(TestContainer container) throws Exception {
Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case1.conf");
Assertions.assertEquals(0, execResult.getExitCode());
Container.ExecResult errResult =
container.executeJob("/fake_cdc_sink_paimon_case1_with_error_schema.conf");
Assertions.assertEquals(1, errResult.getExitCode());
Assertions.assertTrue(
errResult
.getStderr()
.contains(
"['Paimon': The source field with schema 'name INT', expected field schema of sink is '`name` INT'; whose actual schema in the sink table is '`name` STRING'. Please check schema of sink table.]"));
}
// Multi-table CDC sink: one job writes into two Paimon tables in two
// databases; verifies each table independently ends up with its expected
// 2-row post-CDC state. Awaitility polls until both snapshots are readable.
@TestTemplate
public void testFakeMultipleTableSinkPaimon(TestContainer container) throws Exception {
Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case2.conf");
Assertions.assertEquals(0, execResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
// Check FakeDatabase1.FakeTable1
List<PaimonRecord> fake1PaimonRecords =
loadPaimonData(FAKE_DATABASE1, FAKE_TABLE1);
Assertions.assertEquals(2, fake1PaimonRecords.size());
fake1PaimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals("A_1", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 3) {
Assertions.assertEquals("C", paimonRecord.getName());
}
});
// Check FakeDatabase2.FakeTable1
List<PaimonRecord> fake2PaimonRecords =
loadPaimonData(FAKE_DATABASE2, FAKE_TABLE2);
Assertions.assertEquals(2, fake2PaimonRecords.size());
fake2PaimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 100) {
Assertions.assertEquals(
"A_100", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 200) {
Assertions.assertEquals("C", paimonRecord.getName());
}
});
});
}
@TestTemplate
public void testFakeCDCSinkPaimonWithMultipleBucket(TestContainer container) throws Exception {
Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case3.conf");
Assertions.assertEquals(0, execResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
Table table = getTable("seatunnel_namespace3", TARGET_TABLE);
String bucket = table.options().get(CoreOptions.BUCKET.key());
Assertions.assertTrue(StringUtils.isNoneBlank(bucket));
Assertions.assertEquals(2, Integer.valueOf(bucket));
List<PaimonRecord> paimonRecords =
loadPaimonData("seatunnel_namespace3", TARGET_TABLE);
Assertions.assertEquals(2, paimonRecords.size());
paimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals("A_1", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 3) {
Assertions.assertEquals("C", paimonRecord.getName());
}
});
});
}
    @TestTemplate
    public void testFakeCDCSinkPaimonWithPartition(TestContainer container) throws Exception {
        // The sink job creates a table partitioned by "dt"; verify both the table
        // metadata (partition/primary keys) and the row contents afterwards.
        Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case4.conf");
        Assertions.assertEquals(0, execResult.getExitCode());
        given().ignoreExceptions()
                .await()
                .atLeast(100L, TimeUnit.MILLISECONDS)
                .atMost(30L, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Table table = getTable("seatunnel_namespace4", TARGET_TABLE);
                            List<String> partitionKeys = table.partitionKeys();
                            List<String> primaryKeys = table.primaryKeys();
                            // The sink is expected to include the partition column in
                            // the primary key, so the pk should be (pk_id, dt).
                            Assertions.assertTrue(partitionKeys.contains("dt"));
                            Assertions.assertEquals(2, primaryKeys.size());
                            Assertions.assertTrue(primaryKeys.contains("pk_id"));
                            Assertions.assertTrue(primaryKeys.contains("dt"));
                            // Full scan of the table; each row carries (pk_id, name, dt).
                            ReadBuilder readBuilder = table.newReadBuilder();
                            TableScan.Plan plan = readBuilder.newScan().plan();
                            TableRead tableRead = readBuilder.newRead();
                            List<PaimonRecord> result = new ArrayList<>();
                            try (RecordReader<InternalRow> reader = tableRead.createReader(plan)) {
                                reader.forEachRemaining(
                                        row -> {
                                            result.add(
                                                    new PaimonRecord(
                                                            row.getLong(0),
                                                            row.getString(1).toString(),
                                                            row.getString(2).toString()));
                                            log.info(
                                                    "key_id:"
                                                            + row.getLong(0)
                                                            + ", name:"
                                                            + row.getString(1)
                                                            + ", dt:"
                                                            + row.getString(2));
                                        });
                            }
                            Assertions.assertEquals(2, result.size());
                            // Spot-check the row with pk_id == 1.
                            List<PaimonRecord> filterRecords =
                                    result.stream()
                                            .filter(record -> record.pkId == 1)
                                            .collect(Collectors.toList());
                            Assertions.assertEquals(1, filterRecords.size());
                            PaimonRecord paimonRecord = filterRecords.get(0);
                            Assertions.assertEquals("A_1", paimonRecord.getName());
                            Assertions.assertEquals("2024-03-20", paimonRecord.getDt());
                        });
    }
@TestTemplate
public void testFakeCDCSinkPaimonWithParquet(TestContainer container) throws Exception {
Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case5.conf");
Assertions.assertEquals(0, execResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
Table table = getTable("seatunnel_namespace5", TARGET_TABLE);
String fileFormat = table.options().get(CoreOptions.FILE_FORMAT.key());
Assertions.assertTrue(StringUtils.isNoneBlank(fileFormat));
Assertions.assertEquals("parquet", fileFormat);
List<PaimonRecord> paimonRecords =
loadPaimonData("seatunnel_namespace5", TARGET_TABLE);
Assertions.assertEquals(2, paimonRecords.size());
paimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals("A_1", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 3) {
Assertions.assertEquals("C", paimonRecord.getName());
}
});
});
}
@TestTemplate
public void testFakeCDCSinkPaimonWithAvro(TestContainer container) throws Exception {
Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case6.conf");
Assertions.assertEquals(0, execResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
Table table = getTable("seatunnel_namespace6", TARGET_TABLE);
String fileFormat = table.options().get(CoreOptions.FILE_FORMAT.key());
Assertions.assertTrue(StringUtils.isNoneBlank(fileFormat));
Assertions.assertEquals("avro", fileFormat);
List<PaimonRecord> paimonRecords =
loadPaimonData("seatunnel_namespace6", TARGET_TABLE);
Assertions.assertEquals(2, paimonRecords.size());
paimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals("A_1", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 3) {
Assertions.assertEquals("C", paimonRecord.getName());
}
});
});
}
    @TestTemplate
    public void testFakeCDCSinkPaimonWithTimestampNAndRead(TestContainer container)
            throws Exception {
        // Writes timestamp columns with precisions 0/3/6/9, verifies the declared
        // column precision and the stored values, then re-reads the table through a
        // separate Paimon-source job with an Assert sink.
        Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case7.conf");
        Assertions.assertEquals(0, execResult.getExitCode());
        given().ignoreExceptions()
                .await()
                .atLeast(100L, TimeUnit.MILLISECONDS)
                .atMost(30L, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            FileStoreTable table =
                                    (FileStoreTable) getTable("seatunnel_namespace7", TARGET_TABLE);
                            List<DataField> fields = table.schema().fields();
                            // Each *_time column must keep the precision of its source.
                            for (DataField field : fields) {
                                if (field.name().equalsIgnoreCase("one_time")) {
                                    Assertions.assertEquals(
                                            0, ((TimestampType) field.type()).getPrecision());
                                }
                                if (field.name().equalsIgnoreCase("two_time")) {
                                    Assertions.assertEquals(
                                            3, ((TimestampType) field.type()).getPrecision());
                                }
                                if (field.name().equalsIgnoreCase("three_time")) {
                                    Assertions.assertEquals(
                                            6, ((TimestampType) field.type()).getPrecision());
                                }
                                if (field.name().equalsIgnoreCase("four_time")) {
                                    Assertions.assertEquals(
                                            9, ((TimestampType) field.type()).getPrecision());
                                }
                            }
                            ReadBuilder readBuilder = table.newReadBuilder();
                            TableScan.Plan plan = readBuilder.newScan().plan();
                            TableRead tableRead = readBuilder.newRead();
                            List<PaimonRecord> result = new ArrayList<>();
                            // getTimestamp's second argument mirrors each column's precision.
                            try (RecordReader<InternalRow> reader = tableRead.createReader(plan)) {
                                reader.forEachRemaining(
                                        row ->
                                                result.add(
                                                        new PaimonRecord(
                                                                row.getLong(0),
                                                                row.getString(1).toString(),
                                                                row.getTimestamp(2, 0),
                                                                row.getTimestamp(3, 3),
                                                                row.getTimestamp(4, 6),
                                                                row.getTimestamp(5, 9))));
                            }
                            Assertions.assertEquals(2, result.size());
                            // Sub-second digits must survive round-tripping at each precision.
                            for (PaimonRecord paimonRecord : result) {
                                Assertions.assertEquals(
                                        "2024-03-10T10:00:12", paimonRecord.oneTime.toString());
                                Assertions.assertEquals(
                                        "2024-03-10T10:00:00.123", paimonRecord.twoTime.toString());
                                Assertions.assertEquals(
                                        "2024-03-10T10:00:00.123456",
                                        paimonRecord.threeTime.toString());
                                Assertions.assertEquals(
                                        "2024-03-10T10:00:00.123456789",
                                        paimonRecord.fourTime.toString());
                            }
                        });
        Container.ExecResult readResult =
                container.executeJob("/paimon_to_assert_with_timestampN.conf");
        Assertions.assertEquals(0, readResult.getExitCode());
    }
@TestTemplate
public void testFakeSinkPaimonWithDate(TestContainer container) throws Exception {
Container.ExecResult execResult = container.executeJob("/fake_cdc_sink_paimon_case8.conf");
Assertions.assertEquals(0, execResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
FileStoreTable table =
(FileStoreTable) getTable("seatunnel_namespace8", TARGET_TABLE);
List<DataField> fields = table.schema().fields();
for (DataField field : fields) {
if (field.name().equalsIgnoreCase("one_date")) {
Assertions.assertTrue(field.type() instanceof DateType);
}
}
ReadBuilder readBuilder = table.newReadBuilder();
TableScan.Plan plan = readBuilder.newScan().plan();
TableRead tableRead = readBuilder.newRead();
List<PaimonRecord> result = new ArrayList<>();
try (RecordReader<InternalRow> reader = tableRead.createReader(plan)) {
reader.forEachRemaining(
row ->
result.add(
new PaimonRecord(
row.getLong(0),
row.getString(1).toString(),
row.getInt(2))));
}
Assertions.assertEquals(3, result.size());
for (PaimonRecord paimonRecord : result) {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals(
paimonRecord.oneDate,
DateTimeUtils.toInternal(
LocalDate.parse("2024-03-20")));
} else {
Assertions.assertEquals(
paimonRecord.oneDate,
DateTimeUtils.toInternal(
LocalDate.parse("2024-03-10")));
}
}
});
}
@TestTemplate
public void testFakeSinkPaimonWithFullTypeAndReadWithFilter(TestContainer container)
throws Exception {
Container.ExecResult writeResult =
container.executeJob("/fake_to_paimon_with_full_type.conf");
Assertions.assertEquals(0, writeResult.getExitCode());
Container.ExecResult readResult =
container.executeJob("/paimon_to_assert_with_filter1.conf");
Assertions.assertEquals(0, readResult.getExitCode());
Container.ExecResult readResult2 =
container.executeJob("/paimon_to_assert_with_filter2.conf");
Assertions.assertEquals(0, readResult2.getExitCode());
Container.ExecResult readResult3 =
container.executeJob("/paimon_to_assert_with_filter3.conf");
Assertions.assertEquals(0, readResult3.getExitCode());
Container.ExecResult readResult4 =
container.executeJob("/paimon_to_assert_with_filter4.conf");
Assertions.assertEquals(0, readResult4.getExitCode());
Container.ExecResult readResult5 =
container.executeJob("/paimon_to_assert_with_filter5.conf");
Assertions.assertEquals(0, readResult5.getExitCode());
Container.ExecResult readResult6 =
container.executeJob("/paimon_to_assert_with_filter6.conf");
Assertions.assertEquals(0, readResult6.getExitCode());
Container.ExecResult readResult7 =
container.executeJob("/paimon_to_assert_with_filter7.conf");
Assertions.assertEquals(0, readResult7.getExitCode());
Container.ExecResult readResult8 =
container.executeJob("/paimon_to_assert_with_filter8.conf");
Assertions.assertEquals(0, readResult8.getExitCode());
Container.ExecResult readResult9 =
container.executeJob("/paimon_to_assert_with_filter9.conf");
Assertions.assertEquals(0, readResult9.getExitCode());
Container.ExecResult readResult10 =
container.executeJob("/paimon_to_assert_with_filter10.conf");
Assertions.assertEquals(0, readResult10.getExitCode());
}
@TestTemplate
public void testSinkPaimonTruncateTable(TestContainer container) throws Exception {
Container.ExecResult writeResult =
container.executeJob("/fake_sink_paimon_truncate_with_local_case1.conf");
Assertions.assertEquals(0, writeResult.getExitCode());
Container.ExecResult readResult =
container.executeJob("/fake_sink_paimon_truncate_with_local_case2.conf");
Assertions.assertEquals(0, readResult.getExitCode());
given().ignoreExceptions()
.await()
.atLeast(100L, TimeUnit.MILLISECONDS)
.atMost(30L, TimeUnit.SECONDS)
.untilAsserted(
() -> {
List<PaimonRecord> paimonRecords =
loadPaimonData("seatunnel_namespace10", TARGET_TABLE);
Assertions.assertEquals(2, paimonRecords.size());
paimonRecords.forEach(
paimonRecord -> {
if (paimonRecord.getPkId() == 1) {
Assertions.assertEquals("Aa", paimonRecord.getName());
}
if (paimonRecord.getPkId() == 2) {
Assertions.assertEquals("Bb", paimonRecord.getName());
}
Assertions.assertEquals(200, paimonRecord.getScore());
});
List<Long> ids =
paimonRecords.stream()
.map(PaimonRecord::getPkId)
.collect(Collectors.toList());
Assertions.assertFalse(ids.contains(3L));
});
}
    @TestTemplate
    public void testChangelogLookup(TestContainer container) throws Exception {
        // Orchestration test: a long-running stream job (jobIds[0]) tails the
        // changelog of a lookup-producer table while two batch DML jobs mutate it.
        // The sleeps stagger the jobs; their relative order is load-bearing.
        // create Paimon table (changelog-producer=lookup)
        Container.ExecResult writeResult =
                container.executeJob("/changelog_fake_cdc_sink_paimon_case1_ddl.conf");
        Assertions.assertEquals(0, writeResult.getExitCode());
        String[] jobIds =
                new String[] {
                    String.valueOf(JobIdGenerator.newJobId()),
                    String.valueOf(JobIdGenerator.newJobId()),
                    String.valueOf(JobIdGenerator.newJobId())
                };
        log.info("jobIds: {}", Arrays.toString(jobIds));
        List<CompletableFuture<Void>> futures = new ArrayList<>();
        // read changelog and write to append only paimon table
        futures.add(
                CompletableFuture.runAsync(
                        () -> {
                            try {
                                container.executeJob("/changelog_paimon_to_paimon.conf", jobIds[0]);
                            } catch (Exception e) {
                                throw new SeaTunnelException(e);
                            }
                        }));
        // Give the stream reader time to start before any DML runs.
        TimeUnit.SECONDS.sleep(10);
        // dml: insert data
        futures.add(
                CompletableFuture.runAsync(
                        () -> {
                            try {
                                container.executeJob(
                                        "/changelog_fake_cdc_sink_paimon_case1_insert_data.conf",
                                        jobIds[1]);
                            } catch (Exception e) {
                                throw new SeaTunnelException(e);
                            }
                        }));
        // dml: update and delete data
        TimeUnit.SECONDS.sleep(10);
        futures.add(
                CompletableFuture.runAsync(
                        () -> {
                            try {
                                container.executeJob(
                                        "/changelog_fake_cdc_sink_paimon_case1_update_data.conf",
                                        jobIds[2]);
                            } catch (Exception e) {
                                throw new SeaTunnelException(e);
                            }
                        }));
        // stream job running 60 seconds
        TimeUnit.SECONDS.sleep(60);
        // cancel stream job
        container.cancelJob(jobIds[1]);
        container.cancelJob(jobIds[2]);
        container.cancelJob(jobIds[0]);
        // Flag makes loadPaimonData also capture each row's RowKind (+I/-U/+U/-D).
        changeLogEnabled = true;
        List<PaimonRecord> paimonRecords1 = loadPaimonData("seatunnel_namespace", "st_test_sink");
        List<String> actual1 =
                paimonRecords1.stream()
                        .map(PaimonRecord::toChangeLogLookUp)
                        .collect(Collectors.toList());
        log.info("paimon records: {}", actual1);
        // The append-only copy must contain the full change history: three inserts,
        // two update pairs (-U/+U) and one delete.
        Assertions.assertEquals(8, actual1.size());
        Assertions.assertEquals(
                Arrays.asList(
                        "[+I, 1, A, 100, +I]",
                        "[+I, 2, B, 100, +I]",
                        "[+I, 3, C, 100, +I]",
                        "[+I, 1, A, 100, -U]",
                        "[+I, 1, Aa, 200, +U]",
                        "[+I, 2, B, 100, -U]",
                        "[+I, 2, Bb, 90, +U]",
                        "[+I, 3, C, 100, -D]"),
                actual1);
        // The lookup table itself should only retain the final state of each key.
        List<PaimonRecord> paimonRecords2 = loadPaimonData("seatunnel_namespace", "st_test_lookup");
        List<String> actual2 =
                paimonRecords2.stream()
                        .map(PaimonRecord::toChangeLogFull)
                        .collect(Collectors.toList());
        log.info("paimon records: {}", actual2);
        Assertions.assertEquals(2, actual2.size());
        Assertions.assertEquals(Arrays.asList("[+U, 1, Aa, 200]", "[+I, 2, Bb, 90]"), actual2);
        changeLogEnabled = false;
        futures.forEach(future -> future.cancel(true));
    }
    @TestTemplate
    public void testChangelogFullCompaction(TestContainer container) throws Exception {
        // Runs a streaming CDC job against a full-compaction changelog table, then
        // cancels it and checks the final per-key state. Timing of the sleeps
        // relative to the cancel is load-bearing.
        Long jobId = JobIdGenerator.newJobId();
        log.info("jobId: {}", jobId);
        CompletableFuture<Void> voidCompletableFuture =
                CompletableFuture.runAsync(
                        () -> {
                            try {
                                container.executeJob(
                                        "/changelog_fake_cdc_sink_paimon_case2.conf",
                                        String.valueOf(jobId));
                            } catch (Exception e) {
                                throw new SeaTunnelException(e);
                            }
                        });
        // stream job running 20 seconds
        TimeUnit.SECONDS.sleep(20);
        // Flag makes loadPaimonData also capture each row's RowKind.
        changeLogEnabled = true;
        // cancel stream job
        container.cancelJob(String.valueOf(jobId));
        TimeUnit.SECONDS.sleep(5);
        List<PaimonRecord> paimonRecords = loadPaimonData("seatunnel_namespace", "st_test_full");
        List<String> actual =
                paimonRecords.stream()
                        .map(PaimonRecord::toChangeLogFull)
                        .collect(Collectors.toList());
        log.info("paimon records: {}", actual);
        // Only the latest state of each key should remain after compaction.
        Assertions.assertEquals(2, actual.size());
        Assertions.assertEquals(Arrays.asList("[+U, 1, Aa, 200]", "[+I, 2, Bb, 90]"), actual);
        changeLogEnabled = false;
        voidCompletableFuture.cancel(true);
    }
protected List<PaimonRecord> loadPaimonData(String dbName, String tbName) throws Exception {
FileStoreTable table = (FileStoreTable) getTable(dbName, tbName);
ReadBuilder readBuilder = table.newReadBuilder();
TableScan.Plan plan = readBuilder.newScan().plan();
TableRead tableRead = readBuilder.newRead();
List<PaimonRecord> result = new ArrayList<>();
log.info(
"====================================Paimon data===========================================");
log.info(
"==========================================================================================");
log.info(
"==========================================================================================");
try (RecordReader<InternalRow> reader = tableRead.createReader(plan)) {
reader.forEachRemaining(
row -> {
PaimonRecord paimonRecord;
if (changeLogEnabled) {
paimonRecord =
new PaimonRecord(
row.getRowKind(),
row.getLong(0),
row.getString(1).toString());
} else {
paimonRecord =
new PaimonRecord(row.getLong(0), row.getString(1).toString());
}
if (table.schema().fieldNames().contains("score")) {
paimonRecord.setScore(row.getInt(2));
}
if (table.schema().fieldNames().contains("op")) {
paimonRecord.setOp(row.getString(3).toString());
}
result.add(paimonRecord);
log.info(
"rowKind:"
+ row.getRowKind().shortString()
+ ", key_id:"
+ row.getLong(0)
+ ", name:"
+ row.getString(1));
});
}
log.info(
"==========================================================================================");
log.info(
"==========================================================================================");
log.info(
"==========================================================================================");
return result;
}
}
|
googleapis/google-cloud-java | 36,767 | java-securitycenter/proto-google-cloud-securitycenter-v1p1beta1/src/main/java/com/google/cloud/securitycenter/v1p1beta1/ListSourcesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securitycenter/v1p1beta1/securitycenter_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securitycenter.v1p1beta1;
/**
*
*
* <pre>
* Response message for listing sources.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1p1beta1.ListSourcesResponse}
*/
public final class ListSourcesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1p1beta1.ListSourcesResponse)
ListSourcesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListSourcesResponse.newBuilder() to construct.
  // Generated code (protoc): instances are only created through the Builder.
  private ListSourcesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ListSourcesResponse() {
    sources_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListSourcesResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1p1beta1_ListSourcesResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
        .internal_static_google_cloud_securitycenter_v1p1beta1_ListSourcesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse.class,
            com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse.Builder.class);
  }
  public static final int SOURCES_FIELD_NUMBER = 1;
  // Repeated message field number 1 ("sources").
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.securitycenter.v1p1beta1.Source> sources_;
  /**
   *
   *
   * <pre>
   * Sources belonging to the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.securitycenter.v1p1beta1.Source> getSourcesList() {
    return sources_;
  }
  /**
   *
   *
   * <pre>
   * Sources belonging to the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.securitycenter.v1p1beta1.SourceOrBuilder>
      getSourcesOrBuilderList() {
    return sources_;
  }
  /**
   *
   *
   * <pre>
   * Sources belonging to the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
   */
  @java.lang.Override
  public int getSourcesCount() {
    return sources_.size();
  }
  /**
   *
   *
   * <pre>
   * Sources belonging to the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v1p1beta1.Source getSources(int index) {
    return sources_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Sources belonging to the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.securitycenter.v1p1beta1.SourceOrBuilder getSourcesOrBuilder(int index) {
    return sources_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Stored as Object: either a String or a ByteString. The accessors below
  // lazily convert between the two representations and cache the result.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and keep the String for later calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Token to retrieve the next page of results, or empty if there are no more
   * results.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and keep the ByteString for later calls.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Cached result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes sources (field 1), then next_page_token (field 2, only when
  // non-empty), then any unknown fields carried along from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < sources_.size(); i++) {
      output.writeMessage(1, sources_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size once and caches it in memoizedSize (mirrors writeTo).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < sources_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sources_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality over sources, next_page_token and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse other =
        (com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse) obj;
    if (!getSourcesList().equals(other.getSourcesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes the descriptor plus each populated field; cached in memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getSourcesCount() > 0) {
      hash = (37 * hash) + SOURCES_FIELD_NUMBER;
      hash = (53 * hash) + getSourcesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER.
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Stream variants wrap IO failures via parseWithIOException.
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: new builders are derived from the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for listing sources.
* </pre>
*
* Protobuf type {@code google.cloud.securitycenter.v1p1beta1.ListSourcesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1p1beta1.ListSourcesResponse)
com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1p1beta1_ListSourcesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1p1beta1_ListSourcesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse.class,
com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse.Builder.class);
}
// Construct using com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (sourcesBuilder_ == null) {
sources_ = java.util.Collections.emptyList();
} else {
sources_ = null;
sourcesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securitycenter.v1p1beta1.SecuritycenterService
.internal_static_google_cloud_securitycenter_v1p1beta1_ListSourcesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse
getDefaultInstanceForType() {
return com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse build() {
com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse buildPartial() {
com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse result =
new com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse result) {
if (sourcesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
sources_ = java.util.Collections.unmodifiableList(sources_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.sources_ = sources_;
} else {
result.sources_ = sourcesBuilder_.build();
}
}
// Copies singular fields flagged in bitField0_ into `result`.
// Bit 0x00000002 records that next_page_token was explicitly set.
private void buildPartial0(
    com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Type-dispatching merge: uses the fast same-type merge when `other` is a
// ListSourcesResponse, otherwise falls back to the superclass's reflective
// field-by-field merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse) {
    return mergeFrom((com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Merges all set fields of `other` into this builder. Repeated `sources`
// entries are appended; next_page_token is overwritten when non-empty.
// Merging the default instance is a no-op.
public Builder mergeFrom(com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse other) {
  if (other
      == com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse.getDefaultInstance())
    return this;
  if (sourcesBuilder_ == null) {
    // List-backed mode: if our list is still empty we can share the other
    // message's (immutable) list directly; otherwise copy-append.
    if (!other.sources_.isEmpty()) {
      if (sources_.isEmpty()) {
        sources_ = other.sources_;
        bitField0_ = (bitField0_ & ~0x00000001); // shared list is not owned/mutable
      } else {
        ensureSourcesIsMutable();
        sources_.addAll(other.sources_);
      }
      onChanged();
    }
  } else {
    // Builder-backed mode: if the field builder is empty, dispose it and
    // adopt the other message's list, re-creating the builder only when the
    // runtime forces builder usage; otherwise append through the builder.
    if (!other.sources_.isEmpty()) {
      if (sourcesBuilder_.isEmpty()) {
        sourcesBuilder_.dispose();
        sourcesBuilder_ = null;
        sources_ = other.sources_;
        bitField0_ = (bitField0_ & ~0x00000001);
        sourcesBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getSourcesFieldBuilder()
                : null;
      } else {
        sourcesBuilder_.addAllMessages(other.sources_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses wire-format bytes directly into this builder. Parsing stops at
// end-of-stream (tag 0) or an end-group tag; unrecognized fields are kept
// in the unknown-field set. onChanged() fires even on failure (finally).
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: // field 1 (sources), wire type 2 (length-delimited)
          {
            com.google.cloud.securitycenter.v1p1beta1.Source m =
                input.readMessage(
                    com.google.cloud.securitycenter.v1p1beta1.Source.parser(),
                    extensionRegistry);
            if (sourcesBuilder_ == null) {
              ensureSourcesIsMutable();
              sources_.add(m);
            } else {
              sourcesBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18: // field 2 (next_page_token), wire type 2 (length-delimited)
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.securitycenter.v1p1beta1.Source> sources_ =
java.util.Collections.emptyList();
// Copy-on-write guard for sources_: before any in-place modification,
// replace a shared/immutable list with a private ArrayList copy. Bit
// 0x00000001 records that the builder now owns a mutable list.
private void ensureSourcesIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    sources_ =
        new java.util.ArrayList<com.google.cloud.securitycenter.v1p1beta1.Source>(sources_);
    bitField0_ |= 0x00000001;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securitycenter.v1p1beta1.Source,
com.google.cloud.securitycenter.v1p1beta1.Source.Builder,
com.google.cloud.securitycenter.v1p1beta1.SourceOrBuilder>
sourcesBuilder_;
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public java.util.List<com.google.cloud.securitycenter.v1p1beta1.Source> getSourcesList() {
if (sourcesBuilder_ == null) {
return java.util.Collections.unmodifiableList(sources_);
} else {
return sourcesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public int getSourcesCount() {
if (sourcesBuilder_ == null) {
return sources_.size();
} else {
return sourcesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public com.google.cloud.securitycenter.v1p1beta1.Source getSources(int index) {
if (sourcesBuilder_ == null) {
return sources_.get(index);
} else {
return sourcesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder setSources(int index, com.google.cloud.securitycenter.v1p1beta1.Source value) {
if (sourcesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSourcesIsMutable();
sources_.set(index, value);
onChanged();
} else {
sourcesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder setSources(
int index, com.google.cloud.securitycenter.v1p1beta1.Source.Builder builderForValue) {
if (sourcesBuilder_ == null) {
ensureSourcesIsMutable();
sources_.set(index, builderForValue.build());
onChanged();
} else {
sourcesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder addSources(com.google.cloud.securitycenter.v1p1beta1.Source value) {
  if (sourcesBuilder_ == null) {
    // List-backed mode: reject null, ensure we own a mutable list, append.
    if (value == null) {
      throw new NullPointerException();
    }
    ensureSourcesIsMutable();
    sources_.add(value);
    onChanged();
  } else {
    // Builder-backed mode: delegate the append to the field builder.
    sourcesBuilder_.addMessage(value);
  }
  return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder addSources(int index, com.google.cloud.securitycenter.v1p1beta1.Source value) {
if (sourcesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSourcesIsMutable();
sources_.add(index, value);
onChanged();
} else {
sourcesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder addSources(
com.google.cloud.securitycenter.v1p1beta1.Source.Builder builderForValue) {
if (sourcesBuilder_ == null) {
ensureSourcesIsMutable();
sources_.add(builderForValue.build());
onChanged();
} else {
sourcesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder addSources(
int index, com.google.cloud.securitycenter.v1p1beta1.Source.Builder builderForValue) {
if (sourcesBuilder_ == null) {
ensureSourcesIsMutable();
sources_.add(index, builderForValue.build());
onChanged();
} else {
sourcesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder addAllSources(
java.lang.Iterable<? extends com.google.cloud.securitycenter.v1p1beta1.Source> values) {
if (sourcesBuilder_ == null) {
ensureSourcesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sources_);
onChanged();
} else {
sourcesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder clearSources() {
if (sourcesBuilder_ == null) {
sources_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
sourcesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public Builder removeSources(int index) {
if (sourcesBuilder_ == null) {
ensureSourcesIsMutable();
sources_.remove(index);
onChanged();
} else {
sourcesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public com.google.cloud.securitycenter.v1p1beta1.Source.Builder getSourcesBuilder(int index) {
return getSourcesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public com.google.cloud.securitycenter.v1p1beta1.SourceOrBuilder getSourcesOrBuilder(
int index) {
if (sourcesBuilder_ == null) {
return sources_.get(index);
} else {
return sourcesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public java.util.List<? extends com.google.cloud.securitycenter.v1p1beta1.SourceOrBuilder>
getSourcesOrBuilderList() {
if (sourcesBuilder_ != null) {
return sourcesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sources_);
}
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public com.google.cloud.securitycenter.v1p1beta1.Source.Builder addSourcesBuilder() {
return getSourcesFieldBuilder()
.addBuilder(com.google.cloud.securitycenter.v1p1beta1.Source.getDefaultInstance());
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public com.google.cloud.securitycenter.v1p1beta1.Source.Builder addSourcesBuilder(int index) {
return getSourcesFieldBuilder()
.addBuilder(index, com.google.cloud.securitycenter.v1p1beta1.Source.getDefaultInstance());
}
/**
*
*
* <pre>
* Sources belonging to the requested parent.
* </pre>
*
* <code>repeated .google.cloud.securitycenter.v1p1beta1.Source sources = 1;</code>
*/
public java.util.List<com.google.cloud.securitycenter.v1p1beta1.Source.Builder>
getSourcesBuilderList() {
return getSourcesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for `sources`. Once created,
// the field builder takes ownership of the elements and the local
// sources_ reference is released (set to null) — the builder is then the
// single source of truth for the repeated field.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.securitycenter.v1p1beta1.Source,
        com.google.cloud.securitycenter.v1p1beta1.Source.Builder,
        com.google.cloud.securitycenter.v1p1beta1.SourceOrBuilder>
    getSourcesFieldBuilder() {
  if (sourcesBuilder_ == null) {
    sourcesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.securitycenter.v1p1beta1.Source,
            com.google.cloud.securitycenter.v1p1beta1.Source.Builder,
            com.google.cloud.securitycenter.v1p1beta1.SourceOrBuilder>(
            sources_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    sources_ = null;
  }
  return sourcesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
  // nextPageToken_ holds either a String or a ByteString (when it came off
  // the wire). Decode lazily and memoize the String form.
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s; // cache the decoded value
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  // Mirror of getNextPageToken(): lazily encode a cached String to a
  // ByteString and memoize the encoded form.
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b; // cache the encoded value
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no more
* results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1p1beta1.ListSourcesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1p1beta1.ListSourcesResponse)
private static final com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse();
}
public static com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser for ListSourcesResponse. On any parse failure the
// partially built message is attached to the thrown
// InvalidProtocolBufferException so callers can inspect what was decoded
// before the error occurred.
private static final com.google.protobuf.Parser<ListSourcesResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListSourcesResponse>() {
      @java.lang.Override
      public ListSourcesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so the contract (IPBE only) holds.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<ListSourcesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListSourcesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.securitycenter.v1p1beta1.ListSourcesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,879 | java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/UpdateBackupPolicyRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/backup_policy.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1;
/**
*
*
* <pre>
* UpdateBackupPolicyRequest for updating a backup policy.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateBackupPolicyRequest}
*/
public final class UpdateBackupPolicyRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.UpdateBackupPolicyRequest)
UpdateBackupPolicyRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateBackupPolicyRequest.newBuilder() to construct.
private UpdateBackupPolicyRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateBackupPolicyRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateBackupPolicyRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.BackupPolicyProto
.internal_static_google_cloud_netapp_v1_UpdateBackupPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.BackupPolicyProto
.internal_static_google_cloud_netapp_v1_UpdateBackupPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.UpdateBackupPolicyRequest.class,
com.google.cloud.netapp.v1.UpdateBackupPolicyRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int BACKUP_POLICY_FIELD_NUMBER = 2;
private com.google.cloud.netapp.v1.BackupPolicy backupPolicy_;
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the backupPolicy field is set.
*/
@java.lang.Override
public boolean hasBackupPolicy() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The backupPolicy.
*/
@java.lang.Override
public com.google.cloud.netapp.v1.BackupPolicy getBackupPolicy() {
return backupPolicy_ == null
? com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance()
: backupPolicy_;
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.netapp.v1.BackupPolicyOrBuilder getBackupPolicyOrBuilder() {
return backupPolicy_ == null
? com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance()
: backupPolicy_;
}
private byte memoizedIsInitialized = -1;
// This message has no proto2-style required fields, so initialization
// always succeeds. The result is memoized in memoizedIsInitialized
// (-1 = not computed, 0 = false, 1 = true).
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes the set fields in field-number order (update_mask = 1,
// backup_policy = 2), then any preserved unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getUpdateMask());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getBackupPolicy());
  }
  getUnknownFields().writeTo(output);
}
// Computes the serialized byte size of the message and memoizes it in
// memoizedSize (-1 means "not yet computed"). Must stay consistent with
// writeTo(): same fields, same field numbers.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getBackupPolicy());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Value equality: two requests are equal when their {@code update_mask}
 * and {@code backup_policy} fields agree on presence and, where present,
 * on value, and their unknown field sets are equal.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.netapp.v1.UpdateBackupPolicyRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.netapp.v1.UpdateBackupPolicyRequest that =
      (com.google.cloud.netapp.v1.UpdateBackupPolicyRequest) obj;
  // A field matches when both sides agree on presence and, if set, on value.
  boolean updateMaskMatches =
      hasUpdateMask() == that.hasUpdateMask()
          && (!hasUpdateMask() || getUpdateMask().equals(that.getUpdateMask()));
  if (!updateMaskMatches) {
    return false;
  }
  boolean backupPolicyMatches =
      hasBackupPolicy() == that.hasBackupPolicy()
          && (!hasBackupPolicy() || getBackupPolicy().equals(that.getBackupPolicy()));
  if (!backupPolicyMatches) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
/**
 * Hash code consistent with {@link #equals}: mixes the descriptor, each
 * present field (tagged by its field number), and the unknown fields.
 * The result is memoized; {@code 0} means "not yet computed".
 */
@java.lang.Override
public int hashCode() {
  int cached = memoizedHashCode;
  if (cached != 0) {
    return cached;
  }
  int h = 41;
  h = (19 * h) + getDescriptor().hashCode();
  if (hasUpdateMask()) {
    h = (37 * h) + UPDATE_MASK_FIELD_NUMBER;
    h = (53 * h) + getUpdateMask().hashCode();
  }
  if (hasBackupPolicy()) {
    h = (37 * h) + BACKUP_POLICY_FIELD_NUMBER;
    h = (53 * h) + getBackupPolicy().hashCode();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.netapp.v1.UpdateBackupPolicyRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* UpdateBackupPolicyRequest for updating a backup policy.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.UpdateBackupPolicyRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.UpdateBackupPolicyRequest)
com.google.cloud.netapp.v1.UpdateBackupPolicyRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.netapp.v1.BackupPolicyProto
.internal_static_google_cloud_netapp_v1_UpdateBackupPolicyRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.netapp.v1.BackupPolicyProto
.internal_static_google_cloud_netapp_v1_UpdateBackupPolicyRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.netapp.v1.UpdateBackupPolicyRequest.class,
com.google.cloud.netapp.v1.UpdateBackupPolicyRequest.Builder.class);
}
// Construct using com.google.cloud.netapp.v1.UpdateBackupPolicyRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getBackupPolicyFieldBuilder();
}
}
// Resets both message fields to "unset" and disposes any lazily created
// child field builders so the builder can be reused.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  backupPolicy_ = null;
  if (backupPolicyBuilder_ != null) {
    backupPolicyBuilder_.dispose();
    backupPolicyBuilder_ = null;
  }
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.netapp.v1.BackupPolicyProto
.internal_static_google_cloud_netapp_v1_UpdateBackupPolicyRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateBackupPolicyRequest getDefaultInstanceForType() {
return com.google.cloud.netapp.v1.UpdateBackupPolicyRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateBackupPolicyRequest build() {
com.google.cloud.netapp.v1.UpdateBackupPolicyRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateBackupPolicyRequest buildPartial() {
com.google.cloud.netapp.v1.UpdateBackupPolicyRequest result =
new com.google.cloud.netapp.v1.UpdateBackupPolicyRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set singular fields into `result` and accumulates the message's
// has-bits. Bit 0x00000001 = update_mask, bit 0x00000002 = backup_policy;
// each field comes from its child builder when one exists, otherwise from
// the cached message.
private void buildPartial0(com.google.cloud.netapp.v1.UpdateBackupPolicyRequest result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.backupPolicy_ =
        backupPolicyBuilder_ == null ? backupPolicy_ : backupPolicyBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.netapp.v1.UpdateBackupPolicyRequest) {
return mergeFrom((com.google.cloud.netapp.v1.UpdateBackupPolicyRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-wise merge from another UpdateBackupPolicyRequest following protobuf
// merge semantics: message fields set on |other| are merged into (not simply
// replacing) this builder's values. Merging the default instance is a no-op.
public Builder mergeFrom(com.google.cloud.netapp.v1.UpdateBackupPolicyRequest other) {
if (other == com.google.cloud.netapp.v1.UpdateBackupPolicyRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasBackupPolicy()) {
mergeBackupPolicy(other.getBackupPolicy());
}
// Unknown fields from |other| are preserved, and parent builders are notified.
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// Always true: no field of this message is wire-level `required`
// (the REQUIRED markers in the Javadoc are API annotations only).
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses serialized fields from |input| into this builder.
// Wire tags: 10 = update_mask (field 1, length-delimited),
//            18 = backup_policy (field 2, length-delimited),
//             0 = end of stream. Unrecognized tags are preserved via
// parseUnknownField. onChanged() runs in the finally block so parent
// builders observe partial parses even when an exception unwinds.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getBackupPolicyFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Generated parsers surface IO problems wrapped by the protobuf runtime;
// unwrap back to the underlying IOException for this signature.
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Merges |value| into the current update_mask. If a non-default mask is
// already set, the two are merged field-by-field; otherwise |value| replaces
// the stored reference outright. The trailing null check keeps the has-bit
// and change notification consistent with what was actually stored.
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
// A live nested builder owns the field; delegate the merge to it.
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* Backup Policy resource by the update.
* The fields specified in the update_mask are relative to the resource, not
* the full request. A field will be overwritten if it is in the mask. If the
* user does not provide a mask then all fields will be overwritten.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.netapp.v1.BackupPolicy backupPolicy_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.BackupPolicy,
com.google.cloud.netapp.v1.BackupPolicy.Builder,
com.google.cloud.netapp.v1.BackupPolicyOrBuilder>
backupPolicyBuilder_;
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the backupPolicy field is set.
*/
public boolean hasBackupPolicy() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The backupPolicy.
*/
public com.google.cloud.netapp.v1.BackupPolicy getBackupPolicy() {
if (backupPolicyBuilder_ == null) {
return backupPolicy_ == null
? com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance()
: backupPolicy_;
} else {
return backupPolicyBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setBackupPolicy(com.google.cloud.netapp.v1.BackupPolicy value) {
if (backupPolicyBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
backupPolicy_ = value;
} else {
backupPolicyBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setBackupPolicy(
com.google.cloud.netapp.v1.BackupPolicy.Builder builderForValue) {
if (backupPolicyBuilder_ == null) {
backupPolicy_ = builderForValue.build();
} else {
backupPolicyBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeBackupPolicy(com.google.cloud.netapp.v1.BackupPolicy value) {
if (backupPolicyBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& backupPolicy_ != null
&& backupPolicy_ != com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance()) {
getBackupPolicyBuilder().mergeFrom(value);
} else {
backupPolicy_ = value;
}
} else {
backupPolicyBuilder_.mergeFrom(value);
}
if (backupPolicy_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearBackupPolicy() {
bitField0_ = (bitField0_ & ~0x00000002);
backupPolicy_ = null;
if (backupPolicyBuilder_ != null) {
backupPolicyBuilder_.dispose();
backupPolicyBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.BackupPolicy.Builder getBackupPolicyBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getBackupPolicyFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.netapp.v1.BackupPolicyOrBuilder getBackupPolicyOrBuilder() {
if (backupPolicyBuilder_ != null) {
return backupPolicyBuilder_.getMessageOrBuilder();
} else {
return backupPolicy_ == null
? com.google.cloud.netapp.v1.BackupPolicy.getDefaultInstance()
: backupPolicy_;
}
}
/**
*
*
* <pre>
* Required. The backup policy being updated
* </pre>
*
* <code>
* .google.cloud.netapp.v1.BackupPolicy backup_policy = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.BackupPolicy,
com.google.cloud.netapp.v1.BackupPolicy.Builder,
com.google.cloud.netapp.v1.BackupPolicyOrBuilder>
getBackupPolicyFieldBuilder() {
if (backupPolicyBuilder_ == null) {
backupPolicyBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.netapp.v1.BackupPolicy,
com.google.cloud.netapp.v1.BackupPolicy.Builder,
com.google.cloud.netapp.v1.BackupPolicyOrBuilder>(
getBackupPolicy(), getParentForChildren(), isClean());
backupPolicy_ = null;
}
return backupPolicyBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.UpdateBackupPolicyRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.UpdateBackupPolicyRequest)
private static final com.google.cloud.netapp.v1.UpdateBackupPolicyRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.UpdateBackupPolicyRequest();
}
public static com.google.cloud.netapp.v1.UpdateBackupPolicyRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser used by parseFrom()/getParserForType(). All failures are
// normalized to InvalidProtocolBufferException with the partially parsed
// message attached, so callers can inspect what was read before the error.
private static final com.google.protobuf.Parser<UpdateBackupPolicyRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateBackupPolicyRequest>() {
@java.lang.Override
public UpdateBackupPolicyRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateBackupPolicyRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateBackupPolicyRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.netapp.v1.UpdateBackupPolicyRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==========================================================================
// NOTE(review): the two lines originally here were dataset-concatenation
// residue ("googleapis/google-cloud-java | 36,783 | java-datalineage/
// proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/
// datacatalog/lineage/v1/UpdateProcessRequest.java | /*"). Everything below
// is the start of that SECOND generated file (UpdateProcessRequest) and
// belongs in its own compilation unit; it cannot compile in this file
// alongside the class above. The license block's opener is restored here.
// ==========================================================================
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/lineage/v1/lineage.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.lineage.v1;
/**
*
*
* <pre>
* Request message for
* [UpdateProcess][google.cloud.datacatalog.lineage.v1.UpdateProcess].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.UpdateProcessRequest}
*/
public final class UpdateProcessRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.UpdateProcessRequest)
UpdateProcessRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateProcessRequest.newBuilder() to construct.
private UpdateProcessRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateProcessRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateProcessRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateProcessRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateProcessRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest.class,
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest.Builder.class);
}
private int bitField0_;
public static final int PROCESS_FIELD_NUMBER = 1;
private com.google.cloud.datacatalog.lineage.v1.Process process_;
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the process field is set.
*/
@java.lang.Override
public boolean hasProcess() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The process.
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.Process getProcess() {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder getProcessOrBuilder() {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int ALLOW_MISSING_FIELD_NUMBER = 3;
private boolean allowMissing_ = false;
/**
*
*
* <pre>
* If set to true and the process is not found, the request inserts it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @return The allowMissing.
*/
@java.lang.Override
public boolean getAllowMissing() {
return allowMissing_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields to |output| in field-number order:
// field 1 process (message), field 2 update_mask (message),
// field 3 allow_missing (bool) — written only when true, per proto3
// default-value elision. Unknown fields are appended last.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getProcess());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
if (allowMissing_ != false) {
output.writeBool(3, allowMissing_);
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size,
// mirroring writeTo(): only set message fields and a non-default
// allow_missing contribute, plus any preserved unknown fields.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getProcess());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
if (allowMissing_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, allowMissing_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest other =
(com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest) obj;
if (hasProcess() != other.hasProcess()) return false;
if (hasProcess()) {
if (!getProcess().equals(other.getProcess())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (getAllowMissing() != other.getAllowMissing()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Memoized hash consistent with equals(): folds in the descriptor, each set
// field tagged by its field number, allow_missing unconditionally, and the
// unknown-field set. The 19/37/53/29 multipliers are the protobuf
// generator's standard mixing constants.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasProcess()) {
hash = (37 * hash) + PROCESS_FIELD_NUMBER;
hash = (53 * hash) + getProcess().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (37 * hash) + ALLOW_MISSING_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getAllowMissing());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [UpdateProcess][google.cloud.datacatalog.lineage.v1.UpdateProcess].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.UpdateProcessRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.UpdateProcessRequest)
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateProcessRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateProcessRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest.class,
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getProcessFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
process_ = null;
if (processBuilder_ != null) {
processBuilder_.dispose();
processBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
allowMissing_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_UpdateProcessRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest
getDefaultInstanceForType() {
return com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest build() {
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest buildPartial() {
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest result =
new com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies fields set on this builder into |result|. Only the two message
// fields (process, update_mask) propagate has-bits to the message's
// bitField0_; allow_missing is a plain proto3 scalar whose builder bit
// (0x00000004) just gates whether the copy happens.
private void buildPartial0(
com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.process_ = processBuilder_ == null ? process_ : processBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.allowMissing_ = allowMissing_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest) {
return mergeFrom((com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest other) {
if (other
== com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest.getDefaultInstance())
return this;
if (other.hasProcess()) {
mergeProcess(other.getProcess());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.getAllowMissing() != false) {
setAllowMissing(other.getAllowMissing());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getProcessFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
allowMissing_ = input.readBool();
bitField0_ |= 0x00000004;
break;
} // case 24
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.datacatalog.lineage.v1.Process process_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
processBuilder_;
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the process field is set.
*/
public boolean hasProcess() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The process.
*/
public com.google.cloud.datacatalog.lineage.v1.Process getProcess() {
if (processBuilder_ == null) {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
} else {
return processBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setProcess(com.google.cloud.datacatalog.lineage.v1.Process value) {
if (processBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
process_ = value;
} else {
processBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setProcess(
com.google.cloud.datacatalog.lineage.v1.Process.Builder builderForValue) {
if (processBuilder_ == null) {
process_ = builderForValue.build();
} else {
processBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeProcess(com.google.cloud.datacatalog.lineage.v1.Process value) {
if (processBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& process_ != null
&& process_ != com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()) {
getProcessBuilder().mergeFrom(value);
} else {
process_ = value;
}
} else {
processBuilder_.mergeFrom(value);
}
if (process_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearProcess() {
bitField0_ = (bitField0_ & ~0x00000001);
process_ = null;
if (processBuilder_ != null) {
processBuilder_.dispose();
processBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.Process.Builder getProcessBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getProcessFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder getProcessOrBuilder() {
if (processBuilder_ != null) {
return processBuilder_.getMessageOrBuilder();
} else {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
}
}
/**
*
*
* <pre>
* Required. The lineage process to update.
*
* The process's `name` field is used to identify the process to update.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
getProcessFieldBuilder() {
if (processBuilder_ == null) {
processBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>(
getProcess(), getParentForChildren(), isClean());
process_ = null;
}
return processBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* The list of fields to update. Currently not used. The whole message is
* updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private boolean allowMissing_;
/**
*
*
* <pre>
* If set to true and the process is not found, the request inserts it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @return The allowMissing.
*/
@java.lang.Override
public boolean getAllowMissing() {
return allowMissing_;
}
/**
*
*
* <pre>
* If set to true and the process is not found, the request inserts it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @param value The allowMissing to set.
* @return This builder for chaining.
*/
public Builder setAllowMissing(boolean value) {
allowMissing_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* If set to true and the process is not found, the request inserts it.
* </pre>
*
* <code>bool allow_missing = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearAllowMissing() {
bitField0_ = (bitField0_ & ~0x00000004);
allowMissing_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.UpdateProcessRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.UpdateProcessRequest)
private static final com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest();
}
public static com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateProcessRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateProcessRequest>() {
@java.lang.Override
public UpdateProcessRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateProcessRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateProcessRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
oracle/fastr | 35,982 | com.oracle.truffle.r.test/src/com/oracle/truffle/r/test/library/base/TestSimpleArithmetic.java | /*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc., 51
* Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* Copyright (c) 2012-2014, Purdue University
* Copyright (c) 2013, 2021, Oracle and/or its affiliates
*
* All rights reserved.
*/
package com.oracle.truffle.r.test.library.base;
import org.junit.Test;
import com.oracle.truffle.r.test.ArithmeticIncludeList;
import com.oracle.truffle.r.test.TestBase;
public class TestSimpleArithmetic extends TestBase {
@Test
public void testScalarsReal() {
assertEval("{ 1L+1 }");
assertEval("{ 1L+1L }");
assertEval("{ ( 1+1)*(3+2) }");
assertEval("{ 1+TRUE }");
assertEval("{ 1L+TRUE }");
assertEval("{ 1+FALSE<=0 }");
assertEval("{ 1L+FALSE<=0 }");
assertEval("{ TRUE+TRUE+TRUE*TRUE+FALSE+4 }");
assertEval("{ 1L*NA }");
assertEval("{ 1+NA }");
assertEval("{ 2L^10L }");
assertEval("{ 0x10 + 0x10L + 1.28 }");
assertEval("{ 1000000000*100000000000 }");
assertEval("{ 1000000000L*1000000000 }");
assertEval("{ for(i in 1:2) 1000000000L*1000000000L }");
}
@Test
public void testIntegerDivision() {
assertEval("{ 3 %/% 2 }");
assertEval("{ 3L %/% 2L }");
assertEval("{ 3L %/% -2L }");
assertEval("{ 3 %/% -2 }");
assertEval("{ 3 %/% 0 }");
}
@Test
public void testModulo() {
assertEval("{ 3 %% 2 }");
assertEval("{ 3L %% 2L }");
assertEval("{ 3L %% -2L }");
assertEval("{ 3 %% -2 }");
assertEval("{ 3 %% 0 }");
}
@Test
public void testExponentiation() {
assertEval("{ 1^(1/0) }");
assertEval("{ (-2)^(1/0) }");
assertEval("{ (-2)^(-1/0) }");
assertEval("{ (1)^(-1/0) }");
assertEval("{ 0^(-1/0) }");
assertEval("{ 0^(1/0) }");
assertEval("{ 0^(0/0) }");
assertEval("{ 1^(0/0) }");
assertEval("{ (-1)^(0/0) }");
assertEval("{ (-1/0)^(0/0) }");
assertEval("{ (1/0)^(0/0) }");
assertEval("{ (0/0)^(1/0) }");
assertEval("{ (-1/0)^3 }");
assertEval("{ (1/0)^(-4) }");
assertEval("{(-1/0)^(-4) }");
}
@Test
public void testVectorsEmptyResult() {
assertEval("{ integer()+1 }");
assertEval("{ 1+integer() }");
}
@Test
public void testVectorsNA() {
assertEval("{ 1 + c(1L, NA, 3L) }");
assertEval("{ NA + c(1, 2, 3) }");
assertEval("{ c(1, 2, 3) + NA }");
assertEval("{ NA+1:3 }");
assertEval("{ 1:3+NA }");
assertEval("{ NA+c(1L, 2L, 3L) }");
assertEval("{ c(1L, 2L, 3L)+NA }");
assertEval("{ c(NA,NA,NA)+1:3 }");
assertEval("{ 1:3+c(NA, NA, NA) }");
assertEval("{ c(NA,NA,NA)+c(1L,2L,3L) }");
assertEval("{ c(1L,2L,3L)+c(NA, NA, NA) }");
assertEval("{ c(NA,NA)+1:4 }");
assertEval("{ 1:4+c(NA, NA) }");
assertEval("{ c(NA,NA,NA,NA)+1:2 }");
assertEval("{ 1:2+c(NA,NA,NA,NA) }");
assertEval("{ c(NA,NA)+c(1L,2L,3L,4L) }");
assertEval("{ c(1L,2L,3L,4L)+c(NA, NA) }");
assertEval("{ c(NA,NA,NA,NA)+c(1L,2L) }");
assertEval("{ c(1L,2L)+c(NA,NA,NA,NA) }");
assertEval("{ c(1L,NA)+1 }");
assertEval("{ c(1L,NA) + c(2,3) }");
assertEval("{ c(2,3) + c(1L,NA)}");
}
@Test
public void testScalarsComplexIgnore() {
assertEval("{ (1+2i)^(-2) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ ((1+0i)/(0+0i)) ^ (-3) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ ((1+1i)/(0+0i)) ^ (-3) }");
}
@Test
public void testScalarsComplex() {
assertEval("{ (1+2i)*(3+4i) }");
assertEval("{ x <- 1+2i; y <- 3+4i; x*y }");
assertEval("{ x <- 1+2i; y <- 3+4i; x-y }");
assertEval("{ x <- c(-1.5-1i,-1.3-1i) ; y <- c(0+0i, 0+0i) ; y*y+x }");
assertEval("{ x <- c(-1.5-1i,-1.3-1i) ; y <- c(0+0i, 0+0i) ; y-x }");
assertEval("{ x <- 1+2i; y <- 3+4i; x/y }");
assertEval("{ x <- c(-1-2i,3+10i) ; y <- c(3+1i, -4+5i) ; y-x }");
assertEval("{ (1+2i)^2 }");
assertEval("{ (1+2i)^0 }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ 1/((1+0i)/(0+0i)) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1+2i, 3+4i) ; f(1, 2) }");
assertEval("{ f <- function(a, b) { a + b } ; f(2, 3+4i) ; f(1, 2) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1+2i, 3) ; f(1, 2) }");
assertEval("{ f <- function(b) { b / 4i } ; f(1) ; f(1L) }");
assertEval("{ f <- function(b) { 1i / b } ; f(1) ; f(1L) ; f(4) }");
assertEval("{ f <- function(b) { 1i / b } ; f(1+1i) ; f(1L) }");
assertEval("{ f <- function(b) { 1i / b } ; f(1) ; f(1L) }");
assertEval("{ f <- function(b) { 1i / b } ; f(TRUE) ; f(1L) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1,1) ; f(1,1+2i) ; f(TRUE, 2) }");
assertEval("{ f <- function(b) { 1 / b } ; f(1+1i) ; f(1L) }");
assertEval("{ f <- function(b) { b / 2 } ; f(1+1i) ; f(1L) }");
assertEval("{ f <- function(a, b) { a / b } ; f(1,1) ; f(1,1L) ; f(2+1i,(1:2)[3]) }");
assertEval("{ (0+2i)^0 }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ (1+2i) / ((0-1i)/(0+0i)) }");
assertEval("{ (3+2i)^2 }");
assertEval("{ x <- 1+2i; y <- 3+4i; round(x*x*y/(x+y), digits=5) }");
assertEval("{ round( (1+2i)^(3+4i), digits=5 ) }");
assertEval("{ round( ((1+1i)/(0+1i)) ^ (-3.54), digits=5) }");
assertEval("{ c(1+2i,1.1+2.1i) }");
assertEval("{ c(1+2i,11.1+2.1i) }");
assertEval("{ c(1+2i,1.1+12.1i) }");
assertEval("{ c(11+2i,1.1+2.1i) }");
assertEval("{ c(1+12i,1.1+2.1i) }");
assertEval("{ c(-1+2i,1.1+2.1i) }");
assertEval("{ c(1-2i,1+22i) }");
assertEval("{ x <- c(-1-2i,3+10i) ; y <- c(3+1i, -4+5i) ; round(y/x, digits=5) }");
assertEval("{ x <- c(-1-2i,3+10i) ; y <- c(3+1i, -4+5i) ; y+x }");
assertEval("{ x <- c(-1-2i,3+10i) ; y <- c(3+1i, -4+5i) ; y*x }");
}
@Test
public void testComplexNaNInfinity() {
assertEval("{ 0^(-1+1i) }");
assertEval("{ (0+0i)/(0+0i) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ (1+0i)/(0+0i) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ (0+1i)/(0+0i) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ (1+1i)/(0+0i) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ (-1+0i)/(0+0i) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ (-1-1i)/(0+0i) }");
assertEval("{ (1+2i) / ((0-0i)/(0+0i)) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ ((0+1i)/0) * ((0+1i)/0) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ ((0-1i)/0) * ((0+1i)/0) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ ((0-1i)/0) * ((0-1i)/0) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ ((0-1i)/0) * ((1-1i)/0) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ ((0-1i)/0) * ((-1-1i)/0) }");
assertEval("{ 0/0 - 4i }");
assertEval("{ 4i + 0/0 }");
assertEval("{ a <- 1 + 2i; b <- 0/0 - 4i; a + b }");
}
@Test
public void testScalars() {
assertEval("{ 1L / 2L }");
assertEval("{ f <- function(a, b) { a / b } ; f(1L, 2L) ; f(1, 2) }");
assertEval("{ (1:2)[3] / 2L }");
assertEval("{ 2L / (1:2)[3] }");
assertEval("{ a <- (1:2)[3] ; b <- 2L ; a / b }");
assertEval("{ a <- 2L ; b <- (1:2)[3] ; a / b }");
assertEval("{ (1:2)[3] + 2L }");
assertEval("{ 2L + (1:2)[3] }");
assertEval("{ a <- (1:2)[3] ; b <- 2L ; a + b }");
assertEval("{ a <- 2L ; b <- (1:2)[3] ; a + b }");
assertEval("{ a <- (1:2)[3] ; b <- 2 ; a + b }");
assertEval("{ a <- 2 ; b <- (1:2)[3] ; a + b }");
assertEval("{ f <- function(a, b) { a / b } ; f(1,1) ; f(1,1L) ; f(2L,4) }");
assertEval("{ f <- function(a, b) { a / b } ; f(1,1) ; f(1,1L) ; f(2L,4L) }");
assertEval("{ f <- function(a, b) { a / b } ; f(1,1) ; f(1,1L) ; f(2L,(1:2)[3]) }");
assertEval("{ f <- function(a, b) { a / b } ; f(1,1) ; f(1,1L) ; f((1:2)[3], 2L) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1,1) ; f(1,1L) ; f(2L,4) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1,1) ; f(1,1L) ; f(2L,4L) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1,1) ; f(1,1L) ; f(2L,(1:2)[3]) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1,1) ; f(1,1L) ; f((1:2)[3], 2L) }");
assertEval("{ f <- function(a, b) { a / b } ; f(1,1) ; f(1,1L) ; f(2,(1:2)[3]) }");
assertEval("{ f <- function(a, b) { a / b } ; f(1,1) ; f(1,1L) ; f((1:2)[3],2) }");
assertEval("{ f <- function(b) { 1 / b } ; f(1) ; f(1L) ; f(4) }");
assertEval("{ f <- function(b) { 1 / b } ; f(1) ; f(1L) }");
assertEval("{ f <- function(b) { 1 / b } ; f(1L) ; f(1) }");
assertEval("{ f <- function(b) { 1 / b } ; f(TRUE) ; f(1L) }");
assertEval("{ f <- function(b) { b / 1 } ; f(1) ; f(1L) ; f(4) }");
assertEval("{ f <- function(b) { b / 2 } ; f(1) ; f(1L) }");
assertEval("{ f <- function(b) { b / 4 } ; f(1L) ; f(1) }");
assertEval("{ f <- function(b) { 4L / b } ; f(1L) ; f(2) }");
assertEval("{ f <- function(b) { 4L + b } ; f(1L) ; f(2) }");
assertEval("{ f <- function(b) { b / 2L } ; f(1L) ; f(2) }");
assertEval("{ f <- function(b) { 4L / b } ; f(1L) ; f(2) ; f(TRUE) }");
assertEval("{ f <- function(b) { 4L + b } ; f(1L) ; f(2) ; f(TRUE) }");
assertEval("{ f <- function(b) { 4L + b } ; f(1L) ; f(2) ; f((1:2)[3]) }");
assertEval("{ f <- function(b) { 4L / b } ; f(1L) ; f(2) ; f((1:2)[3]) }");
assertEval("{ f <- function(b) { (1:2)[3] + b } ; f(1L) ; f(2) }");
assertEval("{ f <- function(b) { (1:2)[3] + b } ; f(1) ; f(2L) }");
assertEval("{ f <- function(b) { b + 4L } ; f(1L) ; f(2) ; f(TRUE) }");
assertEval("{ f <- function(b) { b + 4L } ; f(1L) ; f(2) ; f((1:2)[3]) }");
assertEval("{ f <- function(b) { b / 4L } ; f(1L) ; f(2) ; f(TRUE) }");
assertEval("{ f <- function(b) { b / 4L } ; f(1L) ; f(2) ; f((1:2)[3]) }");
assertEval("{ f <- function(b) { 1 + b } ; f(1L) ; f(TRUE) }");
assertEval("{ f <- function(b) { FALSE + b } ; f(1L) ; f(2) }");
assertEval("{ f <- function(b) { b + 1 } ; f(1L) ; f(TRUE) }");
assertEval("{ f <- function(b) { b + FALSE } ; f(1L) ; f(2) }");
}
@Test
public void testScalarsRange() {
assertEval("{ f <- function(a, b) { a + b } ; f(c(1,2), c(3,4)) ; f(c(1,2), 3:4) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1:2, c(3,4)) ; f(c(1,2), 3:4) }");
assertEval("{ f <- function(a, b) { a + b } ; f(1:2, 3:4) ; f(c(1,2), 3:4) }");
}
@Test
public void testVectors() {
assertEval("{ x<-c(1,2,3);x }");
assertEval("{ x<-c(1,2,3);x*2 }");
assertEval("{ x<-c(1,2,3);x+2 }");
assertEval("{ x<-c(1,2,3);x+FALSE }");
assertEval("{ x<-c(1,2,3);x+TRUE }");
assertEval("{ x<-c(1,2,3);x*x+x }");
assertEval("{ x<-c(1,2);y<-c(3,4,5,6);x+y }");
assertEval("{ x<-c(1,2);y<-c(3,4,5,6);x*y }");
assertEval("{ x<-c(1,2);z<-c();x==z }");
assertEval("{ x<-1+NA; c(1,2,3,4)+c(x,10) }");
assertEval("{ c(1L,2L,3L)+TRUE }");
assertEval("{ c(1L,2L,3L)*c(10L) }");
assertEval("{ c(1L,2L,3L)*c(10,11,12) }");
assertEval("{ c(1L,2L,3L,4L)-c(TRUE,FALSE) }");
assertEval("{ ia<-c(1L,2L);ib<-c(3L,4L);d<-c(5,6);ia+ib+d }");
}
@Test
public void testVectorsRanges() {
assertEval("{ 1L + 1:2 }");
assertEval("{ 4:3 + 2L }");
assertEval("{ 1:2 + 3:4 }");
assertEval("{ 1:2 + c(1L, 2L) }");
assertEval("{ c(1L, 2L) + 1:4 }");
assertEval("{ 1:4 + c(1L, 2L) }");
assertEval("{ 2L + 1:2 }");
assertEval("{ 1:2 + 2L }");
assertEval("{ c(1L, 2L) + 2L }");
assertEval("{ 2L + c(1L, 2L) }");
assertEval("{ 1 + 1:2 }");
assertEval("{ c(1,2) + 1:2 }");
assertEval("{ c(1,2,3,4) + 1:2 }");
assertEval("{ c(1,2,3,4) + c(1L,2L) }");
assertEval("{ 1:2 + 1 }");
assertEval("{ 1:2 + c(1,2) }");
assertEval("{ 1:2 + c(1,2,3,4) }");
assertEval("{ c(1L,2L) + c(1,2,3,4) }");
assertEval("{ 1L + c(1,2) }");
assertEval("{ 1:4+c(1,2) }");
assertEval("{ c(1,2)+1:4 }");
}
@Test
public void testVectorsOperations() {
assertEval("{ a <- c(1,3) ; b <- c(2,4) ; a ^ b }");
assertEval("{ a <- c(1,3) ; a ^ 3 }");
assertEval("{ c(1,3) - 4 }");
assertEval("{ c(1,3) %/% c(2,4) }");
assertEval("{ c(1,3) / c(2,4) }");
assertEval("{ 1:1 / 0:0 }");
assertEval("{ -2:2 / 0:0 }");
assertEval("3 ** 4");
}
@Test
public void testVectorsOperationsComplex() {
assertEval("{ a <- c(1+1i,3+2i) ; a - (4+3i) }");
assertEval("{ c(1+1i,3+2i) * c(1,2) }");
assertEval("{ z <- c(1+1i,3+2i) ; z * c(1,2) }");
assertEval("{ round(c(1+1i,2+3i)^c(1+1i,3+4i), digits = 5) }");
assertEval("{ round( 3^c(1,2,3+1i), digits=5 ) }");
assertEval("{ z <- c(-1.5-1i,10) ; (z * z)[1] }");
assertEval("{ c(1+1i,3+2i) / 2 }");
assertEval("{ c(1,2,3+1i)^3 }");
}
@Test
public void testVectorsComplex() {
assertEval("{ 1:4+c(1,2+2i) }");
assertEval("{ c(1,2+2i)+1:4 }");
assertEval("x <- c(NaN, 3+2i); xre <- Re(x); xim <- (0+1i) * Im(x); xre + xim");
}
@Test
public void testVectorsModulo() {
assertEval("{ c(3,4) %% 2 }");
assertEval("{ c(3,4) %% c(2,5) }");
}
@Test
public void testVectorsIntegerDivision() {
assertEval("{ c(3,4) %/% 2 }");
}
@Test
public void testVectorsLengthWarning() {
assertEval("{ 1:2+1:3 }");
assertEval("{ 1:3*1:2 }");
assertEval("{ 1:3+c(1,2+2i) }");
assertEval("{ c(1,2+2i)+1:3 }");
}
@Test
public void testVectorsNonConformable() {
assertEval("{ x <- 1:2 ; dim(x) <- 1:2 ; y <- 2:3 ; dim(y) <- 2:1 ; x + y }");
assertEval("{ x <- 1:2 ; dim(x) <- 1:2 ; y <- 2:3 ; dim(y) <- c(1,1,2) ; x + y }");
}
@Test
public void testVectorsMatrixDimsDontMatch() {
assertEval(Output.IgnoreErrorContext, "{ m <- matrix(nrow=2, ncol=2, 1:4) ; m + 1:16 }");
}
@Test
public void testUnaryNot() {
assertEval("{ !TRUE }");
assertEval("{ !FALSE }");
assertEval("{ !NA }");
}
@Test
public void testUnaryNotVector() {
assertEval("{ !c(TRUE,TRUE,FALSE,NA) }");
assertEval("{ !c(1,2,3,4,0,0,NA) }");
assertEval("{ !((0-3):3) }");
}
@Test
public void testUnaryNotRaw() {
assertEval("{ f <- function(arg) { !arg } ; f(as.raw(10)) ; f(as.raw(1:3)) }");
assertEval("{ a <- as.raw(201) ; !a }");
assertEval("{ a <- as.raw(12) ; !a }");
assertEval("{ f <- function(arg) { !arg } ; f(as.raw(10)) ; f(as.raw(c(a=1,b=2))) }");
assertEval("{ l <- list(); !l }");
assertEval("{ f <- function(arg) { !arg } ; f(as.raw(10)) ; f(matrix(as.raw(1:4),nrow=2 )) }");
assertEval("{ f <- function(arg) { !arg } ; f(as.raw(10)) ; x <- as.raw(10:11) ; attr(x, \"my\") <- 1 ; f(x) }");
}
@Test
public void testUnaryNotError() {
assertEval("{ l <- c(\"hello\", \"hi\") ; !l }");
assertEval("{ l <- function(){1} ; !l }");
assertEval("{ l <- list(1); !l }");
assertEval("{ x<-1:4; dim(x)<-c(2, 2); names(x)<-101:104; attr(x, \"dimnames\")<-list(c(\"201\", \"202\"), c(\"203\", \"204\")); attr(x, \"foo\")<-\"foo\"; y<-!x; attributes(y) }");
}
@Test
public void testUnaryNotPropagate() {
// list of sequences should be converted to list of string vectors
assertEval("{ x<-1:4; dim(x)<-c(2, 2); names(x)<-101:104; attr(x, \"dimnames\")<-list(201:202, 203:204); attr(x, \"foo\")<-\"foo\"; y<-!x; attributes(y) }");
}
@Test
public void testUnaryNotDimensions() {
assertEval("{ xx <- double(0); dim(xx) <- c(0,0); dim(!xx) }");
assertEval("{ xx <- double(1); dim(xx) <- c(1,1); dim(!xx) }");
}
@Test
public void testUnaryMinus() {
assertEval("{ -3 }");
assertEval("{ --3 }");
assertEval("{ ---3 }");
assertEval("{ ----3 }");
assertEval("{ -(0/0) }");
assertEval("{ -(1/0) }");
}
@Test
public void testUnaryMinusVector() {
assertEval("{ -(1[2]) }");
}
@Test
public void testUnaryMinusComplex() {
assertEval("{ -(2+1i) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ -((0+1i)/0) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ -((1+0i)/0) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ -c((1+0i)/0,2) }");
}
@Test
public void testUnaryMinusAsFunction() {
assertEval("{ f <- function(z) { -z } ; f(TRUE) ; f(1L) }");
assertEval("{ f <- function(z) { -z } ; f(1L) ; f(1) }");
assertEval("{ f <- function(z) { -z } ; f(1) ; f(1L) }");
assertEval("{ f <- function(z) { -z } ; f(1L) ; f(TRUE) }");
assertEval("{ z <- logical() ; -z }");
assertEval("{ z <- integer() ; -z }");
assertEval("{ z <- double() ; -z }");
assertEval("{ f <- function(z) { -z } ; f(1:3) ; f(1L) }");
assertEval("{ f <- function(z) { -z } ; f(1:3) ; f(TRUE) }");
}
@Test
public void testUnaryMinusAsFunctionComplex() {
assertEval("{ f <- function(z) { -z } ; f(1L) ; f(1+1i) }");
assertEval("{ f <- function(z) { -z } ; f(1+1i) ; f(1L) }");
assertEval("{ z <- (1+1i)[0] ; -z }");
assertEval("{ f <- function(z) { -z } ; f(1:3) ; f(c((0+0i)/0,1+1i)) }");
}
@Test
public void testUnaryMinusErrors() {
assertEval("{ z <- \"hello\" ; -z }");
assertEval("{ z <- c(\"hello\",\"hi\") ; -z }");
assertEval("{ f <- function(z) { -z } ; f(1:3) ; f(\"hello\") }");
}
@Test
public void testUnaryMinusDimensions() {
assertEval("{ xx <- double(0); dim(xx) <- c(0,0); dim(-xx) }");
assertEval("{ xx <- double(1); dim(xx) <- c(1,1); dim(-xx) }");
}
@Test
// Elementwise arithmetic between matrices and scalars / equally-shaped matrices.
public void testMatrices() {
assertEval("{ m <- matrix(1:6, nrow=2, ncol=3, byrow=TRUE) ; m-1 }");
assertEval("{ z<-matrix(12)+1 ; z }");
assertEval("{ m <- matrix(1:6, nrow=2, ncol=3, byrow=TRUE) ; m+1L }");
assertEval("{ m <- matrix(1:6, nrow=2, ncol=3, byrow=TRUE) ; m+m }");
}
@Test
// Matrix product %*%: zero-extent operands, vector-as-row/column promotion,
// non-conformable errors, NA/NaN propagation (incl. the 'blas' matprod option),
// complex operands, and the raw-type error case.
public void testMatricesProduct() {
assertEval("{ double() %*% double() }");
assertEval("{ m <- double() ; dim(m) <- c(0,4) ; m %*% t(m) }");
assertEval("{ m <- double() ; dim(m) <- c(0,4) ; t(m) %*% m }");
assertEval("{ m <- double() ; dim(m) <- c(0,4) ; n <- matrix(1:4,4) ; m %*% n }");
assertEval("{ m <- double() ; dim(m) <- c(4,0) ; n <- matrix(1:4,ncol=4) ; n %*% m }");
assertEval("{ x <- 1:3 %*% 9:11 ; x[1] }");
assertEval("{ m<-matrix(1:3, nrow=1) ; 1:2 %*% m }");
assertEval("{ m<-matrix(1:6, nrow=2) ; 1:2 %*% m }");
assertEval("{ m<-matrix(1:6, nrow=2) ; m %*% 1:3 }");
assertEval("{ m<-matrix(1:3, ncol=1) ; m %*% 1:2 }");
assertEval("{ a<-matrix(1:6, ncol=2) ; b<-matrix(11:16, nrow=2) ; a %*% b }");
assertEval("{ a <- array(1:9, dim=c(3,1,3)) ; a %*% 1:9 }");
assertEval("{ matrix(2,nrow=2,ncol=3) %*% matrix(4,nrow=1,ncol=5) }");
assertEval("{ 1:3 %*% matrix(4,nrow=2,ncol=5) }");
assertEval("{ matrix(4,nrow=2,ncol=5) %*% 1:4 }");
assertEval("{ m <- matrix(c(1,2,3,0/0), nrow=4) ; m %*% 1:4 }");
assertEval("{ m <- matrix(c(NA,1,0/0,2), nrow=2) ; 1:2 %*% m }");
assertEval("{ m <- double() ; dim(m) <- c(0,0) ; m %*% m }");
assertEval("{ m <- matrix(c(NA,1,4,2), nrow=2) ; t(m) %*% m }");
assertEval("{ matrix(c(3,1,0/0,2), nrow=2) %*% matrix(1:6,nrow=2) }");
assertEval("{ as.raw(1:3) %*% 1:3 }");
assertEval("{ options(matprod = 'blas'); matrix(c(NaN,1,7,2,4,NA), nrow=3) %*% matrix(c(3,1,NA,2,NaN,5,6,7), nrow=2) }");
assertEval("{ NaN %*% NA}");
assertEval("{ NA %*% NaN}");
assertEval("{ c(NaN) %*% c(NA)}");
assertEval("{ c(NA) %*% c(NaN)}");
assertEval("{ c(1,2,NA,NaN) %*% c(1,3,3,4) }");
assertEval("{ c(1,2,NaN,NA) %*% c(1,3,3,4) }");
assertEval("{ c(1,2,2,3) %*% c(1,3,NA,NaN) }");
assertEval("{ c(1,2,2,3) %*% c(1,3,NaN,NA) }");
assertEval("{ c(NA,NaN) %*% c(1.6,3.6) }");
assertEval("{ c(NaN,NA) %*% c(1.6,3.6) }");
assertEval("{ c(1.1,2.2) %*% c(NA,NaN) }");
assertEval("{ c(1.1,22.2) %*% c(NaN,NA) }");
assertEval("{c(as.complex(NaN), NA) %*% c(1, 3.6)}");
assertEval("{c(as.complex(NaN), NA, as.complex(NaN)) %*% c(1, 3.6)}");
assertEval("{c(as.complex(NA), NaN) %*% c(1, 3.6)}");
assertEval("{c(as.complex(NA), NaN, as.complex(NA)) %*% c(1, 3.6)}");
assertEval("{c(1.1, 2.2) %*% c(as.complex(NaN), NA)}");
assertEval("{c(1.1, 2.2) %*% c(as.complex(NaN), NA, as.complex(NaN))}");
assertEval("{c(1.1, 2.2) %*% c(as.complex(NA), NaN)}");
assertEval("{c(1.1, 2.2) %*% c(as.complex(NA), NaN, as.complex(NA))}");
assertEval("{c(as.complex(NaN)) %*% c(NA)}");
assertEval("{c(as.complex(NA)) %*% c(NaN)}");
assertEval("{c(NA) %*% c(as.complex(NaN))}");
assertEval("{c(NaN) %*% c(as.complex(NA))}");
}
@Test
// Outer product %o%, plus non-conformable / invalid-operand %*% error cases.
public void testMatricesOuterProduct() {
assertEval("{ 1:3 %o% 1:2 }");
assertEval("{ 1:4 %*% 1:3 }");
assertEval("{ 1:3 %*% as.raw(c(1,2,3)) }");
assertEval("{ 1:3 %*% c(TRUE,FALSE,TRUE) }");
assertEval(Output.IgnoreErrorContext, "{ as.raw(1:3) %o% 1:3 }");
}
@Test
// %*% binds tighter than '/'; >2-dimensional arrays still add elementwise.
public void testMatricesPrecedence() {
assertEval("{ 10 / 1:3 %*% 3:1 }");
assertEval("{ x <- 1:2 ; dim(x) <- c(1,1,2) ; y <- 2:3 ; dim(y) <- c(1,1,2) ; x + y }");
}
@Test
// Scalar '||': short-circuits, so an RHS side effect (x <<- 2) must not run when
// the LHS already decides the result; NA follows R's three-valued logic.
public void testNonvectorizedLogicalOr() {
assertEval("{ 1.1 || 3.15 }");
assertEval("{ 0 || 0 }");
assertEval("{ 1 || 0 }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; TRUE || f(FALSE) ; x } ");
assertEval("{ NA || 1 }");
assertEval("{ 0 || NA }");
assertEval("{ NA || 0 }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; NA || f(NA) ; x }");
}
@Test
// Scalar '||' inside a function, re-invoked with changing argument types to
// exercise call-site re-specialization. NOTE: the @Test annotation was missing,
// so JUnit silently skipped this method; every sibling test method carries it.
public void testNonvectorizedLogicalOrAsFunction() {
assertEval("{ f <- function(a,b) { a || b } ; f(1,2) ; f(1,2) ; f(1L,2L) }");
assertEval("{ f <- function(a,b) { a || b } ; f(1L,2L) ; f(1L,2L) ; f(0,FALSE) }");
}
@Test
// Scalar '&&': truth table, short-circuit (RHS side effect suppressed after FALSE),
// NA propagation, and coercion of numeric/complex/string/raw operands.
public void testNonvectorizedLogicalAnd() {
assertEval("{ TRUE && FALSE }");
assertEval("{ FALSE && FALSE }");
assertEval("{ FALSE && TRUE }");
assertEval("{ TRUE && TRUE }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; FALSE && f(FALSE) ; x } ");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; c(FALSE, TRUE) && f(FALSE) ; x } ");
assertEval("{ TRUE && NA }");
assertEval("{ FALSE && NA }");
assertEval("{ NA && TRUE }");
assertEval("{ NA && FALSE }");
assertEval("{ NA && NA }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; NA && f(NA) ; x } ");
assertEval("{ TRUE && c(TRUE, FALSE) }");
assertEval("{ c(TRUE, FALSE) && c(TRUE, FALSE) }");
assertEval("{ c(TRUE, FALSE) && c(TRUE, FALSE, FALSE) }");
assertEval("{ c(1.0, 0.0) && 1.0 }");
assertEval("{ c(1, 0) && 1 }");
assertEval("{ c(1.1, 0.0) && c(TRUE, FALSE) }");
assertEval("{ c(1, 0) && 1+1i }");
assertEval("{ c(1+1i, 0+0i) && 1 }");
assertEval("{ 1.0 && c(1+1i, 0+0i) }");
assertEval("{ c(1+1i, 0+0i) && c(1+1i, 0+0i) }");
assertEval("{ c(\"1\", \"0\") && TRUE }");
assertEval("{ c(1, 0) && \"1\" }");
assertEval("{ \"1\" && c(1, 0) }");
assertEval("{ as.raw(c(1, 0)) && TRUE }");
}
@Test
// Scalar '&&' inside a function, re-invoked with changing argument types/lengths.
public void testNonvectorizedLogicalAndAsFunction() {
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), TRUE) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) ; f(1:3,4:10) ; f(1,2) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) ; f(1:3,4:10) ; f(double(),2) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) ; f(1:3,4:10) ; f(integer(),2) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) ; f(1:3,4:10) ; f(2+3i,1/0) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) ; f(1:3,4:10) ; f(2+3i,logical()) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) ; f(1:3,4:10) ; f(1,2) ; f(logical(),4) }");
assertEval("{ f <- function(a,b) { a && b } ; f(c(TRUE, FALSE), logical()) ; f(TRUE, c(TRUE,TRUE,FALSE)) ; f(1,2) }");
}
@Test
// Scalar '&&'/'||' corner cases: invalid operand types (string, raw, NULL),
// complex coercion, and zero-length operands.
public void testNonvectorizedLogicalSpecialChecks() {
assertEval("{ FALSE && \"hello\" }");
assertEval("{ TRUE || \"hello\" }");
assertEval("{ \"hello\" || TRUE }");
assertEval("{ FALSE || \"hello\" }");
assertEval("{ 0 && \"hello\" }");
assertEval("{ 0.0 && \"hello\" }");
assertEval("{ 1+2i && 0 }");
assertEval("{ 1+2i && TRUE }");
assertEval("{ TRUE && 0+0i}");
assertEval("{ 1.0 && 0+0i}");
assertEval("{ 1 && \"hello\" }");
assertEval("{ 0.1 && \"hello\" }");
assertEval("{ TRUE && \"hello\" }");
assertEval("{ \"hello\" && TRUE }");
assertEval("{ \"hello\" && 1 }");
assertEval("{ \"hello\" && 1L }");
assertEval("{ NULL && 1 }");
assertEval("{ 0.1 && NULL }");
assertEval("{ as.raw(1) && 1 }");
assertEval("{ 0.1 && as.raw(1) }");
assertEval("{ logical(0) && logical(0) }");
assertEval("{ logical(0) && TRUE }");
assertEval("{ logical(0) && FALSE }");
assertEval("{ character(0) && FALSE }");
assertEval("{ character(0) && TRUE }");
assertEval("{ 1 || \"hello\" }");
assertEval("{ FALSE || 1+2i }");
assertEval("{ 0+0i || FALSE}");
assertEval("{ 1.1 || \"hello\" }");
assertEval("{ 1+2i || 0 }");
assertEval("{ 1+2i || 1.0 }");
assertEval("{ 0 || \"hello\" }");
assertEval("{ 0L || \"hello\" }");
assertEval("{ \"hello\" || FALSE }");
assertEval("{ \"hello\" || 1 }");
assertEval("{ \"hello\" || 1L }");
assertEval("{ NULL || 1 }");
assertEval("{ 0 || NULL }");
assertEval("{ as.raw(1) || 1 }");
assertEval("{ 0 || as.raw(1) }");
assertEval("{ as.raw(10) && \"hi\" }");
assertEval("{ c(TRUE,FALSE) | logical() }");
assertEval("{ logical() | c(TRUE,FALSE) }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ as.raw(c(1,4)) | raw() }");
assertEval(ArithmeticIncludeList.INCLUDE_LIST, "{ raw() | as.raw(c(1,4))}");
assertEval("{ logical(0) || logical(0) }");
assertEval("{ logical(0) || TRUE }");
assertEval("{ logical(0) || FALSE }");
assertEval("{ character(0) || FALSE }");
assertEval("{ character(0) || TRUE }");
}
@Test
// Recycling of unequal-length operands for vectorized '|' and '&'.
public void testNonvectorizedLogicalLengthChecks() {
assertEval("{ as.raw(c(1,4)) | as.raw(c(1,5,4)) }");
assertEval("{ as.raw(c(1,5,4)) | as.raw(c(1,4)) }");
assertEval("{ c(TRUE, FALSE, FALSE) & c(TRUE,TRUE) }");
assertEval("{ c(TRUE, TRUE) & c(TRUE, FALSE, FALSE) }");
assertEval("{ c(a=TRUE, TRUE) | c(TRUE, b=FALSE, FALSE) }");
}
@Test
// Vectorized '|': no short-circuit, so the RHS side effect always runs; NA and raw operands.
public void testVectorizedLogicalOr() {
assertEval("{ 1.1 | 3.15 }");
assertEval("{ 0 | 0 }");
assertEval("{ 1 | 0 }");
assertEval("{ NA | 1 }");
assertEval("{ NA | 0 }");
assertEval("{ 0 | NA }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; NA | f(NA) ; x }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; TRUE | f(FALSE) ; x }");
assertEval("{ a <- as.raw(200) ; b <- as.raw(255) ; a | b }");
assertEval("{ a <- as.raw(200) ; b <- as.raw(1) ; a | b }");
assertEval("c(TRUE, FALSE) | c(NA, NA)");
}
@Test
// Vectorized '|' inside a function, re-invoked with changing argument types.
public void testVectorizedLogicalOrAsFunction() {
assertEval("{ f <- function(a,b) { a | b } ; f(c(TRUE, FALSE), FALSE) ; f(1L, 3+4i) }");
assertEval("{ f <- function(a,b) { a | b } ; f(c(TRUE, FALSE), FALSE) ; f(c(FALSE,FALSE), 3+4i) }");
assertEval("{ f <- function(a,b) { a | b } ; f(as.raw(c(1,4)), as.raw(3)) ; f(4, FALSE) }");
}
@Test
// Vectorized '&': truth table, NA propagation, recycling, and bitwise-and on raw.
public void testVectorizedLogicalAnd() {
assertEval("{ TRUE & FALSE }");
assertEval("{ FALSE & FALSE }");
assertEval("{ FALSE & TRUE }");
assertEval("{ TRUE & TRUE }");
assertEval("{ TRUE & NA }");
assertEval("{ FALSE & NA }");
assertEval("{ NA & TRUE }");
assertEval("{ NA & FALSE }");
assertEval("{ NA & NA }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; NA & f(NA) ; x }");
assertEval("{ x <- 1 ; f <- function(r) { x <<- 2; r } ; FALSE & f(FALSE) ; x }");
assertEval("{ 1:4 & c(FALSE,TRUE) }");
assertEval("{ a <- as.raw(201) ; b <- as.raw(1) ; a & b }");
assertEval("{ c(FALSE, NA) & c(NA, NA) }");
}
@Test
// Vectorized '&' inside a function, re-invoked with changing argument types.
public void testVectorizedLogicalAndAsFunction() {
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(FALSE, FALSE) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(as.raw(10), as.raw(11)) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(1L, 0L) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(1L, 0) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(1L, TRUE) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(1L, 3+4i) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, FALSE) ; f(1L, 3+4i) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, FALSE) ; f(TRUE, 3+4i) }");
}
@Test
// Complex operands coerce to logical for vectorized '|' and '&'.
public void testVectorizedLogicalComplex() {
assertEval("{ 1+2i | 0 }");
assertEval("{ 1+2i & 0 }");
}
@Test
// Invalid operand-type combinations (string, raw mixed with numeric/logical) for '|'/'&'.
public void testVectorizedLogicalTypeCheck() {
assertEval("{ TRUE | \"hello\" }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(as.raw(10), 12) }");
assertEval("{ f <- function(a,b) { a & b } ; f(TRUE, 1L) ; f(FALSE, as.raw(10)) }");
assertEval("{ f <- function(a,b) { a | b } ; f(as.raw(c(1,4)), as.raw(3)) ; f(as.raw(4), FALSE) }");
assertEval("{ f <- function(a,b) { a | b } ; f(as.raw(c(1,4)), as.raw(3)) ; f(FALSE, as.raw(4)) }");
assertEval("{ f <- function(a,b) { a | b } ; f(as.raw(c(1,4)), 3) }");
assertEval("{ f <- function(a,b) { a | b } ; f(3, as.raw(c(1,4))) }");
}
@Test
// Names/dim/other attributes must propagate correctly through vectorized '|'.
public void testVectorizedLogicalAttributes() {
assertEval("{ x<-1:4; names(x)<-101:104; x | TRUE }");
assertEval("{ x<-1:4; names(x)<-101:104; y<-21:24; names(y)<-121:124; x | y }");
assertEval("{ x<-1:4; names(x)<-101:104; y<-21:28; names(y)<-121:128; x | y }");
assertEval("{ x<-1:4; names(x)<-101:104; attr(x, \"foo\")<-\"foo\"; attributes(x | TRUE) }");
assertEval("{ x<-1:4; names(x)<-101:104; attr(x, \"foo\")<-\"foo\"; y<-21:24; names(y)<-121:124; attributes(x | y) }");
// A misalignment error similar to those in TestSimpleVector (testIgnored1-3)
assertEval(Ignored.ReferenceError, "{ x<-as.raw(1:4); names(x)<-101:104; y<-as.raw(21:24); names(y)<-121:124; x | y }");
assertEval("{ x<-1:4; y<-21:24; names(y)<-121:124; attributes(x | y) }");
assertEval("{ x<-1:4; names(x)<-101:104; y<-21:28; names(y)<-121:128; attributes(y | x) }");
assertEval("{ x<-1:4; names(x)<-101:104; y<-21:28; attributes(x | y) }");
assertEval(Output.IgnoreErrorContext, "{ x<-1:4; dim(x)<-c(2,2); y<-21:28; x | y }");
}
@Test
// Integer arithmetic overflowing .Machine$integer.max yields NA with a warning.
public void testIntegerOverflow() {
// invoke twice to check warning keeps being printed
assertEval("{ for(i in 1:2) {x <- 2147483647L ; x + 1L} }");
assertEval("{ for(i in 1:2) {x <- 2147483647L ; x * x} }");
assertEval("{ for(i in 1:2) {x <- -2147483647L ; x - 2L} }");
assertEval("{ for(i in 1:2) {x <- -2147483647L ; x - 1L} }");
assertEval("{ for(i in 1:2) {x <- -2147483647L ; x + -1L} }");
assertEval("{ for(i in 1:2) 2147483647L + 1:3 }");
assertEval("{ for(i in 1:2) 2147483647L + c(1L,2L,3L) }");
assertEval("{ for(i in 1:2) 1:3 + 2147483647L }");
assertEval("{ for(i in 1:2) c(1L,2L,3L) + 2147483647L }");
assertEval("{ for(i in 1:2) 1:3 + c(2147483647L,2147483647L,2147483647L) }");
assertEval("{ for(i in 1:2) c(2147483647L,2147483647L,2147483647L) + 1:3 }");
assertEval("{ for(i in 1:2) c(1L,2L,3L) + c(2147483647L,2147483647L,2147483647L) }");
assertEval("{ for(i in 1:2) c(2147483647L,2147483647L,2147483647L) + c(1L,2L,3L) }");
assertEval("{ for(i in 1:2) 1:4 + c(2147483647L,2147483647L) }");
assertEval("{ for(i in 1:2) c(2147483647L,2147483647L) + 1:4 }");
assertEval("{ for(i in 1:2) c(1L,2L,3L,4L) + c(2147483647L,2147483647L) }");
assertEval("{ for(i in 1:2) c(2147483647L,2147483647L) + c(1L,2L,3L,4L) }");
}
@Test
// Integer division/modulo by zero yields NA without an overflow warning.
public void testIntegerOverflowNoWarning() {
assertEval("{ 3L %/% 0L }");
assertEval("{ 3L %% 0L }");
assertEval("{ c(3L,3L) %/% 0L }");
assertEval("{ c(3L,3L) %% 0L }");
}
@Test
// Reassignment of a function-local variable by mixed int/double arithmetic.
public void testArithmeticUpdate() {
assertEval("{ x <- 3 ; f <- function(z) { if (z) { x <- 1 } ; x <- x + 1L ; x } ; f(FALSE) }");
assertEval("{ x <- 3 ; f <- function(z) { if (z) { x <- 1 } ; x <- 1L + x ; x } ; f(FALSE) }");
assertEval("{ x <- 3 ; f <- function(z) { if (z) { x <- 1 } ; x <- x - 1L ; x } ; f(FALSE) }");
}
@Test
// xor(): logical truth table, numeric coercion, and operand recycling.
public void testXor() {
assertEval(" xor(TRUE, TRUE) ");
assertEval(" xor(FALSE, TRUE) ");
assertEval(" xor(TRUE, FALSE) ");
assertEval(" xor(FALSE, FALSE) ");
assertEval("{ xor(7, 42) }");
assertEval("{ xor(0:2, 2:4) }");
assertEval("{ xor(0:2, 2:7) }");
}
@Test
// Argument evaluation order: foo() mutates 'a' via <<- while 'a + foo()' is evaluated.
public void testArgSideEffect() {
assertEval("{ a <- c(1, 2, 4); foo <- function() { a[[1]] <<- 42; 33; }; a + foo() }");
}
}
|
googleapis/google-cloud-java | 36,857 | java-accesscontextmanager/proto-google-identity-accesscontextmanager-v1/src/main/java/com/google/identity/accesscontextmanager/v1/CreateServicePerimeterRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/identity/accesscontextmanager/v1/access_context_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.identity.accesscontextmanager.v1;
/**
*
*
* <pre>
* A request to create a `ServicePerimeter`.
* </pre>
*
* Protobuf type {@code google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest}
*/
public final class CreateServicePerimeterRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest)
CreateServicePerimeterRequestOrBuilder {
private static final long serialVersionUID = 0L;
// NOTE(review): protoc-generated code ("DO NOT EDIT") -- comments added here are
// lost on regeneration; prefer documenting in the .proto source.
// Use CreateServicePerimeterRequest.newBuilder() to construct.
private CreateServicePerimeterRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg ctor used for the default instance; fields get their proto3 defaults.
private CreateServicePerimeterRequest() {
parent_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateServicePerimeterRequest();
}
// Descriptor plumbing tying this message class to its definition in
// access_context_manager.proto (used for reflection-based field access).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_CreateServicePerimeterRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_CreateServicePerimeterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest.class,
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest.Builder
.class);
}
// Presence bits for message-typed fields (bit 0 = service_perimeter; see writeTo).
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
// Holds either a String or a ByteString; lazily decoded/cached by getParent().
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. Resource name for the access policy which owns this [Service
 * Perimeter] [google.identity.accesscontextmanager.v1.ServicePerimeter].
 *
 * Format: `accessPolicies/{policy_id}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// First access after parsing: decode the ByteString once and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * Required. Resource name for the access policy which owns this [Service
 * Perimeter] [google.identity.accesscontextmanager.v1.ServicePerimeter].
 *
 * Format: `accessPolicies/{policy_id}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
// Mirror of getParent(): encode once and cache the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SERVICE_PERIMETER_FIELD_NUMBER = 2;
// null until set; presence is tracked via bitField0_ bit 0, not by null-checking.
private com.google.identity.accesscontextmanager.v1.ServicePerimeter servicePerimeter_;
/**
 *
 *
 * <pre>
 * Required. The [Service Perimeter]
 * [google.identity.accesscontextmanager.v1.ServicePerimeter] to create.
 * Syntactic correctness of the [Service Perimeter]
 * [google.identity.accesscontextmanager.v1.ServicePerimeter] is a
 * precondition for creation.
 * </pre>
 *
 * <code>
 * .google.identity.accesscontextmanager.v1.ServicePerimeter service_perimeter = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the servicePerimeter field is set.
 */
@java.lang.Override
public boolean hasServicePerimeter() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Required. The [Service Perimeter]
 * [google.identity.accesscontextmanager.v1.ServicePerimeter] to create.
 * Syntactic correctness of the [Service Perimeter]
 * [google.identity.accesscontextmanager.v1.ServicePerimeter] is a
 * precondition for creation.
 * </pre>
 *
 * <code>
 * .google.identity.accesscontextmanager.v1.ServicePerimeter service_perimeter = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The servicePerimeter.
 */
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.ServicePerimeter getServicePerimeter() {
// Never returns null: falls back to the default instance when unset.
return servicePerimeter_ == null
? com.google.identity.accesscontextmanager.v1.ServicePerimeter.getDefaultInstance()
: servicePerimeter_;
}
/**
 *
 *
 * <pre>
 * Required. The [Service Perimeter]
 * [google.identity.accesscontextmanager.v1.ServicePerimeter] to create.
 * Syntactic correctness of the [Service Perimeter]
 * [google.identity.accesscontextmanager.v1.ServicePerimeter] is a
 * precondition for creation.
 * </pre>
 *
 * <code>
 * .google.identity.accesscontextmanager.v1.ServicePerimeter service_perimeter = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.ServicePerimeterOrBuilder
getServicePerimeterOrBuilder() {
return servicePerimeter_ == null
? com.google.identity.accesscontextmanager.v1.ServicePerimeter.getDefaultInstance()
: servicePerimeter_;
}
// Memo: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No proto2-required fields exist, so the message is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Proto3 semantics: an empty parent string is not written to the wire.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
// service_perimeter is written only when its presence bit is set.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getServicePerimeter());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// Size is memoized; -1 means "not yet computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getServicePerimeter());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj
instanceof com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest)) {
return super.equals(obj);
}
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest other =
(com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest) obj;
if (!getParent().equals(other.getParent())) return false;
// Message fields compare presence first, then value.
if (hasServicePerimeter() != other.hasServicePerimeter()) return false;
if (hasServicePerimeter()) {
if (!getServicePerimeter().equals(other.getServicePerimeter())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Memoized; 0 doubles as the "not yet computed" sentinel.
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasServicePerimeter()) {
hash = (37 * hash) + SERVICE_PERIMETER_FIELD_NUMBER;
hash = (53 * hash) + getServicePerimeter().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom overloads: one per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream),
// each with and without an ExtensionRegistry.
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first (for streamed sequences of messages).
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields a fresh empty Builder; anything else is copied in.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A request to create a `ServicePerimeter`.
* </pre>
*
* Protobuf type {@code google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest)
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequestOrBuilder {
// Builder-side descriptor plumbing (mirrors the message-level accessors).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_CreateServicePerimeterRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_CreateServicePerimeterRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest.class,
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest.Builder
.class);
}
// Construct using
// com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested-field builders only when the runtime requires it.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getServicePerimeterFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
// Reset all fields to proto3 defaults and drop any nested builder.
bitField0_ = 0;
parent_ = "";
servicePerimeter_ = null;
if (servicePerimeterBuilder_ != null) {
servicePerimeterBuilder_.dispose();
servicePerimeterBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.identity.accesscontextmanager.v1.AccessContextManagerProto
.internal_static_google_identity_accesscontextmanager_v1_CreateServicePerimeterRequest_descriptor;
}
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
getDefaultInstanceForType() {
return com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest build() {
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest result =
buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
buildPartial() {
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest result =
new com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields from the builder into the result, translating builder
// presence bits (bit 0 = parent, bit 1 = service_perimeter) into the
// message's presence bits (bit 0 = service_perimeter).
private void buildPartial0(
com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.servicePerimeter_ =
servicePerimeterBuilder_ == null ? servicePerimeter_ : servicePerimeterBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
// Boilerplate overrides that simply delegate to the reflection-based superclass.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest) {
return mergeFrom(
(com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another CreateServicePerimeterRequest into this builder: a non-empty
// parent overwrites ours, and a set service_perimeter is merged field-by-field.
public Builder mergeFrom(
    com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest other) {
  if (other
      == com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
          .getDefaultInstance()) return this; // nothing to merge from the default
  if (!other.getParent().isEmpty()) {
    parent_ = other.parent_;
    bitField0_ |= 0x00000001;
    onChanged();
  }
  if (other.hasServicePerimeter()) {
    mergeServicePerimeter(other.getServicePerimeter());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // Generated code reports always-initialized: no proto2-style required
  // fields are tracked for this message.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of input
          done = true;
          break;
        case 10: // field 1 (parent), length-delimited
          {
            parent_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18: // field 2 (service_perimeter), length-delimited
          {
            input.readMessage(
                getServicePerimeterFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even if parsing failed part-way through.
    onChanged();
  } // finally
  return this;
}
private int bitField0_; // has-bits: 0x1 = parent, 0x2 = service_perimeter
// Stored as String or ByteString; lazily converted and cached on access.
private java.lang.Object parent_ = "";
/**
 * Returns the required {@code parent} field: the resource name of the access
 * policy that owns this service perimeter, in the form
 * {@code accessPolicies/{policy_id}}.
 *
 * <p>The field is stored either as a {@link java.lang.String} or as a
 * {@link com.google.protobuf.ByteString}; on first string access the UTF-8
 * bytes are decoded and the decoded value is cached in place.
 *
 * @return The parent.
 */
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  }
  com.google.protobuf.ByteString bytes = (com.google.protobuf.ByteString) ref;
  java.lang.String decoded = bytes.toStringUtf8();
  parent_ = decoded;
  return decoded;
}
/**
 * Returns the {@code parent} field as UTF-8 bytes. If the field is currently
 * cached as a {@link java.lang.String}, it is encoded and the encoded
 * {@link com.google.protobuf.ByteString} is cached in its place.
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (!(ref instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) ref;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
  parent_ = encoded;
  return encoded;
}
/**
 * Sets the required {@code parent} field: the resource name of the access
 * policy that owns this service perimeter
 * ({@code accessPolicies/{policy_id}}).
 *
 * @param value The parent to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
  parent_ = value;
  onChanged();
  return this;
}
/**
 * Clears the {@code parent} field back to its default (empty) value.
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  bitField0_ = (bitField0_ & ~0x00000001);
  parent_ = getDefaultInstance().getParent();
  onChanged();
  return this;
}
/**
 * Sets the {@code parent} field from raw bytes, which must be valid UTF-8.
 *
 * @param value The bytes for parent to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  bitField0_ |= 0x00000001;
  parent_ = value;
  onChanged();
  return this;
}
private com.google.identity.accesscontextmanager.v1.ServicePerimeter servicePerimeter_;
// Lazily created nested builder for service_perimeter; once created it owns
// the field's value and servicePerimeter_ is nulled out.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.identity.accesscontextmanager.v1.ServicePerimeter,
        com.google.identity.accesscontextmanager.v1.ServicePerimeter.Builder,
        com.google.identity.accesscontextmanager.v1.ServicePerimeterOrBuilder>
    servicePerimeterBuilder_;
/**
 * Reports whether the required {@code service_perimeter} field has been set
 * on this builder.
 *
 * @return Whether the servicePerimeter field is set.
 */
public boolean hasServicePerimeter() {
  return (bitField0_ & 0x00000002) != 0;
}
/**
 * Returns the {@code service_perimeter} field: the perimeter to create.
 * If a nested builder exists it is the source of truth; otherwise the plain
 * field is returned, or the default instance when unset.
 *
 * @return The servicePerimeter.
 */
public com.google.identity.accesscontextmanager.v1.ServicePerimeter getServicePerimeter() {
  if (servicePerimeterBuilder_ != null) {
    return servicePerimeterBuilder_.getMessage();
  }
  com.google.identity.accesscontextmanager.v1.ServicePerimeter current = servicePerimeter_;
  return current != null
      ? current
      : com.google.identity.accesscontextmanager.v1.ServicePerimeter.getDefaultInstance();
}
/**
 * Sets the {@code service_perimeter} field: the perimeter to create.
 *
 * @param value The servicePerimeter to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setServicePerimeter(
    com.google.identity.accesscontextmanager.v1.ServicePerimeter value) {
  if (servicePerimeterBuilder_ != null) {
    servicePerimeterBuilder_.setMessage(value);
  } else {
    if (value == null) {
      throw new NullPointerException();
    }
    servicePerimeter_ = value;
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Sets the {@code service_perimeter} field from a builder; the builder is
 * built immediately and the resulting message stored.
 *
 * @param builderForValue Builder for the servicePerimeter to set.
 * @return This builder for chaining.
 */
public Builder setServicePerimeter(
    com.google.identity.accesscontextmanager.v1.ServicePerimeter.Builder builderForValue) {
  com.google.identity.accesscontextmanager.v1.ServicePerimeter built = builderForValue.build();
  if (servicePerimeterBuilder_ != null) {
    servicePerimeterBuilder_.setMessage(built);
  } else {
    servicePerimeter_ = built;
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] to create.
* Syntactic correctness of the [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] is a
* precondition for creation.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.ServicePerimeter service_perimeter = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeServicePerimeter(
    com.google.identity.accesscontextmanager.v1.ServicePerimeter value) {
  if (servicePerimeterBuilder_ == null) {
    // No nested builder yet: merge into the plain field. If the field is
    // unset (or still the shared default instance) adopt the value outright.
    if (((bitField0_ & 0x00000002) != 0)
        && servicePerimeter_ != null
        && servicePerimeter_
            != com.google.identity.accesscontextmanager.v1.ServicePerimeter
                .getDefaultInstance()) {
      getServicePerimeterBuilder().mergeFrom(value);
    } else {
      servicePerimeter_ = value;
    }
  } else {
    servicePerimeterBuilder_.mergeFrom(value);
  }
  if (servicePerimeter_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. The [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] to create.
* Syntactic correctness of the [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] is a
* precondition for creation.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.ServicePerimeter service_perimeter = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearServicePerimeter() {
  bitField0_ = (bitField0_ & ~0x00000002);
  servicePerimeter_ = null;
  // Dispose of the nested builder so later accessors recreate a fresh one.
  if (servicePerimeterBuilder_ != null) {
    servicePerimeterBuilder_.dispose();
    servicePerimeterBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Required. The [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] to create.
* Syntactic correctness of the [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] is a
* precondition for creation.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.ServicePerimeter service_perimeter = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.identity.accesscontextmanager.v1.ServicePerimeter.Builder
    getServicePerimeterBuilder() {
  // Handing out the builder marks the field as set, since callers typically
  // mutate the returned builder in place.
  bitField0_ |= 0x00000002;
  onChanged();
  return getServicePerimeterFieldBuilder().getBuilder();
}
/**
 * Returns a read view of the {@code service_perimeter} field without forcing
 * builder creation: the nested builder's view when one exists, otherwise the
 * plain field (or the default instance when unset).
 */
public com.google.identity.accesscontextmanager.v1.ServicePerimeterOrBuilder
    getServicePerimeterOrBuilder() {
  if (servicePerimeterBuilder_ == null) {
    com.google.identity.accesscontextmanager.v1.ServicePerimeter current = servicePerimeter_;
    return current != null
        ? current
        : com.google.identity.accesscontextmanager.v1.ServicePerimeter.getDefaultInstance();
  }
  return servicePerimeterBuilder_.getMessageOrBuilder();
}
/**
*
*
* <pre>
* Required. The [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] to create.
* Syntactic correctness of the [Service Perimeter]
* [google.identity.accesscontextmanager.v1.ServicePerimeter] is a
* precondition for creation.
* </pre>
*
* <code>
* .google.identity.accesscontextmanager.v1.ServicePerimeter service_perimeter = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.identity.accesscontextmanager.v1.ServicePerimeter,
        com.google.identity.accesscontextmanager.v1.ServicePerimeter.Builder,
        com.google.identity.accesscontextmanager.v1.ServicePerimeterOrBuilder>
    getServicePerimeterFieldBuilder() {
  // Lazily create the nested field builder; once created it takes ownership
  // of the current value and the plain field is nulled out.
  if (servicePerimeterBuilder_ == null) {
    servicePerimeterBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.identity.accesscontextmanager.v1.ServicePerimeter,
            com.google.identity.accesscontextmanager.v1.ServicePerimeter.Builder,
            com.google.identity.accesscontextmanager.v1.ServicePerimeterOrBuilder>(
            getServicePerimeter(), getParentForChildren(), isClean());
    servicePerimeter_ = null;
  }
  return servicePerimeterBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Replaces the unknown-field set wholesale; delegates to the base class.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Merges unrecognized wire data so round-tripping preserves it.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest)
}
// @@protoc_insertion_point(class_scope:google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest)
// Singleton default (empty) instance shared by all callers.
private static final com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest();
}
public static com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
    getDefaultInstance() {
  // Shared immutable default: all fields unset/empty.
  return DEFAULT_INSTANCE;
}
// Wire-format parser used by parser()/getParserForType(); parses via a
// Builder and attaches the partially parsed message to failures so callers
// can inspect what was read before the error.
private static final com.google.protobuf.Parser<CreateServicePerimeterRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateServicePerimeterRequest>() {
      @java.lang.Override
      public CreateServicePerimeterRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<CreateServicePerimeterRequest> parser() {
  // Static accessor for the shared wire-format parser.
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateServicePerimeterRequest> getParserForType() {
  // Instance-level accessor required by the Message interface.
  return PARSER;
}
@java.lang.Override
public com.google.identity.accesscontextmanager.v1.CreateServicePerimeterRequest
    getDefaultInstanceForType() {
  // Required by MessageOrBuilder; same object as getDefaultInstance().
  return DEFAULT_INSTANCE;
}
}
|
apache/distributedlog | 36,950 | distributedlog-proxy-server/src/main/java/org/apache/distributedlog/service/stream/StreamImpl.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.distributedlog.service.stream;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Optional;
import com.google.common.base.Stopwatch;
import java.util.concurrent.CompletableFuture;
import org.apache.distributedlog.api.AsyncLogWriter;
import org.apache.distributedlog.DistributedLogConfiguration;
import org.apache.distributedlog.api.DistributedLogManager;
import org.apache.distributedlog.config.DynamicDistributedLogConfiguration;
import org.apache.distributedlog.exceptions.AlreadyClosedException;
import org.apache.distributedlog.exceptions.DLException;
import org.apache.distributedlog.exceptions.OverCapacityException;
import org.apache.distributedlog.exceptions.OwnershipAcquireFailedException;
import org.apache.distributedlog.exceptions.StatusCode;
import org.apache.distributedlog.exceptions.StreamNotReadyException;
import org.apache.distributedlog.exceptions.StreamUnavailableException;
import org.apache.distributedlog.exceptions.UnexpectedException;
import org.apache.distributedlog.io.Abortables;
import org.apache.distributedlog.api.namespace.Namespace;
import org.apache.distributedlog.protocol.util.TwitterFutureUtils;
import org.apache.distributedlog.service.FatalErrorHandler;
import org.apache.distributedlog.service.ServerFeatureKeys;
import org.apache.distributedlog.service.config.ServerConfiguration;
import org.apache.distributedlog.service.config.StreamConfigProvider;
import org.apache.distributedlog.service.stream.limiter.StreamRequestLimiter;
import org.apache.distributedlog.service.streamset.Partition;
import org.apache.distributedlog.common.stats.BroadCastStatsLogger;
import org.apache.distributedlog.common.concurrent.FutureUtils;
import org.apache.distributedlog.util.OrderedScheduler;
import org.apache.distributedlog.util.TimeSequencer;
import org.apache.distributedlog.util.Utils;
import com.twitter.util.Duration;
import com.twitter.util.Function0;
import com.twitter.util.Future;
import com.twitter.util.FutureEventListener;
import com.twitter.util.Promise;
import com.twitter.util.Timer;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.bookkeeper.feature.Feature;
import org.apache.bookkeeper.feature.FeatureProvider;
import org.apache.bookkeeper.stats.Counter;
import org.apache.bookkeeper.stats.Gauge;
import org.apache.bookkeeper.stats.OpStatsLogger;
import org.apache.bookkeeper.stats.StatsLogger;
import org.jboss.netty.util.HashedWheelTimer;
import org.jboss.netty.util.Timeout;
import org.jboss.netty.util.TimerTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.runtime.AbstractFunction1;
import scala.runtime.BoxedUnit;
/**
* Implementation of {@link Stream}.
*/
public class StreamImpl implements Stream {
private static final Logger logger = LoggerFactory.getLogger(StreamImpl.class);
/**
* The status of the stream.
*
* <p>The status change of the stream should just go in one direction. If a stream hits
* any error, the stream should be put in error state. If a stream is in error state,
* it should be removed and not reused anymore.
*/
public enum StreamStatus {
    UNINITIALIZED(-1),
    INITIALIZING(0),
    INITIALIZED(1),
    CLOSING(-4),
    CLOSED(-5),
    // if a stream is in error state, it should be abort during closing.
    ERROR(-6);

    // Numeric code exported through the stream_status gauge for monitoring.
    final int code;

    StreamStatus(int code) {
        this.code = code;
    }

    int getCode() {
        return code;
    }

    // ERROR/CLOSING/CLOSED are terminal: ops submitted in these states fail.
    public static boolean isUnavailable(StreamStatus status) {
        return StreamStatus.ERROR == status || StreamStatus.CLOSING == status || StreamStatus.CLOSED == status;
    }
}
// Stream identity.
private final String name;
private final Partition partition;

// Log handle, writer and lifecycle state; writer/status/owner are volatile
// because they are read outside the stream's monitor (e.g. in submit()).
private DistributedLogManager manager;
private volatile AsyncLogWriter writer;
private volatile StreamStatus status;
// Owner reported for this stream; assigned outside this chunk — TODO confirm
// it carries the remote owner on ownership-acquire failure.
private volatile String owner;
private volatile Throwable lastException;
// Ops queued while the stream is still initializing; swapped out on acquire.
private volatile Queue<StreamOp> pendingOps = new ArrayDeque<StreamOp>();

private final Promise<Void> closePromise = new Promise<Void>();
// Guards transaction-id sequencing for writes (see executeOp).
private final Object txnLock = new Object();
private final TimeSequencer sequencer = new TimeSequencer();
private final StreamRequestLimiter limiter;
private final DynamicDistributedLogConfiguration dynConf;
private final DistributedLogConfiguration dlConfig;
private final Namespace dlNamespace;
private final String clientId;
private final OrderedScheduler scheduler;
private final ReentrantReadWriteLock closeLock = new ReentrantReadWriteLock();
private final Feature featureRateLimitDisabled;
private final StreamManager streamManager;
private final StreamConfigProvider streamConfigProvider;
private final FatalErrorHandler fatalErrorHandler;
// Timeouts (milliseconds) configured from ServerConfiguration.
private final long streamProbationTimeoutMs;
private final long serviceTimeoutMs;
private final long writerCloseTimeoutMs;
private final boolean failFastOnStreamNotReady;
private final HashedWheelTimer requestTimer;
private final Timer futureTimer;

// Stats
private final StatsLogger streamLogger;
private final StatsLogger streamExceptionStatLogger;
private final StatsLogger limiterStatLogger;
private final Counter serviceTimeout;
private final OpStatsLogger streamAcquireStat;
private final OpStatsLogger writerCloseStatLogger;
private final Counter pendingOpsCounter;
private final Counter unexpectedExceptions;
private final Counter writerCloseTimeoutCounter;
private final StatsLogger exceptionStatLogger;
// Per-exception-class counters, created on demand in countException().
private final ConcurrentHashMap<String, Counter> exceptionCounters =
    new ConcurrentHashMap<String, Counter>();
private final Gauge<Number> streamStatusGauge;
// Since we may create and discard streams at initialization if there's a race,
// must not do any expensive initialization here (particularly any locking or
// significant resource allocation etc.).
StreamImpl(final String name,
           final Partition partition,
           String clientId,
           StreamManager streamManager,
           StreamOpStats streamOpStats,
           ServerConfiguration serverConfig,
           DistributedLogConfiguration dlConfig,
           DynamicDistributedLogConfiguration streamConf,
           FeatureProvider featureProvider,
           StreamConfigProvider streamConfigProvider,
           Namespace dlNamespace,
           OrderedScheduler scheduler,
           FatalErrorHandler fatalErrorHandler,
           HashedWheelTimer requestTimer,
           Timer futureTimer) {
    this.clientId = clientId;
    this.dlConfig = dlConfig;
    this.streamManager = streamManager;
    this.name = name;
    this.partition = partition;
    this.status = StreamStatus.UNINITIALIZED;
    // Placeholder failure delivered to ops that execute before a writer exists.
    this.lastException = new IOException("Fail to write record to stream " + name);
    this.streamConfigProvider = streamConfigProvider;
    this.dlNamespace = dlNamespace;
    this.featureRateLimitDisabled = featureProvider.getFeature(
        ServerFeatureKeys.SERVICE_RATE_LIMIT_DISABLED.name().toLowerCase());
    this.scheduler = scheduler;
    this.serviceTimeoutMs = serverConfig.getServiceTimeoutMs();
    this.streamProbationTimeoutMs = serverConfig.getStreamProbationTimeoutMs();
    this.writerCloseTimeoutMs = serverConfig.getWriterCloseTimeoutMs();
    this.failFastOnStreamNotReady = dlConfig.getFailFastOnStreamNotReady();
    this.fatalErrorHandler = fatalErrorHandler;
    this.dynConf = streamConf;
    // Limiter stats are reported to both a service-wide and a per-stream scope.
    StatsLogger limiterStatsLogger = BroadCastStatsLogger.two(
        streamOpStats.baseScope("stream_limiter"),
        streamOpStats.streamRequestScope(partition, "limiter"));
    this.limiter = new StreamRequestLimiter(name, dynConf, limiterStatsLogger, featureRateLimitDisabled);
    this.requestTimer = requestTimer;
    this.futureTimer = futureTimer;
    // Stats
    this.streamLogger = streamOpStats.streamRequestStatsLogger(partition);
    this.limiterStatLogger = streamOpStats.baseScope("request_limiter");
    this.streamExceptionStatLogger = streamLogger.scope("exceptions");
    this.serviceTimeout = streamOpStats.baseCounter("serviceTimeout");
    StatsLogger streamsStatsLogger = streamOpStats.baseScope("streams");
    this.streamAcquireStat = streamsStatsLogger.getOpStatsLogger("acquire");
    this.pendingOpsCounter = streamOpStats.baseCounter("pending_ops");
    this.unexpectedExceptions = streamOpStats.baseCounter("unexpected_exceptions");
    this.exceptionStatLogger = streamOpStats.requestScope("exceptions");
    this.writerCloseStatLogger = streamsStatsLogger.getOpStatsLogger("writer_close");
    this.writerCloseTimeoutCounter = streamsStatsLogger.getCounter("writer_close_timeouts");
    // Gauges
    this.streamStatusGauge = new Gauge<Number>() {
        @Override
        public Number getDefaultValue() {
            return StreamStatus.UNINITIALIZED.getCode();
        }
        @Override
        public Number getSample() {
            // Samples the live status so dashboards track state transitions.
            return status.getCode();
        }
    };
}
/** Returns the owner recorded for this stream, if any. */
@Override
public String getOwner() {
    return this.owner;
}
/** Returns the name of this stream. */
@Override
public String getStreamName() {
    return this.name;
}
/** Returns the per-stream dynamic configuration. */
@Override
public DynamicDistributedLogConfiguration getStreamConfiguration() {
    return this.dynConf;
}
/** Returns the partition this stream maps to. */
@Override
public Partition getPartition() {
    return this.partition;
}
/**
 * Opens the underlying distributedlog log for the given name, using the
 * namespace's static configuration defaults plus this stream's dynamic
 * configuration and per-stream stats logger.
 */
private DistributedLogManager openLog(String name) throws IOException {
    return dlNamespace.openLog(
        name,
        java.util.Optional.<DistributedLogConfiguration>empty(),
        java.util.Optional.of(dynConf),
        java.util.Optional.<StatsLogger>of(streamLogger));
}
// Expensive initialization, only called once per stream.
@Override
public void initialize() throws IOException {
    manager = openLog(name);
    // Better to avoid registering the gauge multiple times, so do this in init
    // which only gets called once.
    streamLogger.registerGauge("stream_status", this.streamStatusGauge);
    // Signal initialization is complete, should be last in this method.
    status = StreamStatus.INITIALIZING;
}
/** Human-readable summary of the stream: name, manager, writer and status. */
@Override
public String toString() {
    return "Stream:" + name + ", " + manager + ", " + writer + " Status:" + status;
}
/** Kicks off asynchronous ownership acquisition; any failure puts the stream in ERROR and closes it. */
@Override
public void start() {
    // acquire the stream
    acquireStream().addEventListener(new FutureEventListener<Boolean>() {
        @Override
        public void onSuccess(Boolean success) {
            if (!success) {
                // failed to acquire the stream. set the stream in error status and close it.
                setStreamInErrorStatus();
                requestClose("Failed to acquire the ownership");
            }
        }

        @Override
        public void onFailure(Throwable cause) {
            // unhandled exceptions
            logger.error("Stream {} threw unhandled exception : ", name, cause);
            // failed to acquire the stream. set the stream in error status and close it.
            setStreamInErrorStatus();
            requestClose("Unhandled exception");
        }
    });
}
//
// Stats Operations
//
/**
 * Records an exception occurrence against both the service-wide exception
 * stats and the given per-stream exception logger.
 *
 * @param t the exception observed; counted under {@code "null"} when null
 * @param streamExceptionLogger per-stream stats logger to record against
 */
void countException(Throwable t, StatsLogger streamExceptionLogger) {
    String exceptionName = null == t ? "null" : t.getClass().getName();
    // computeIfAbsent is atomic on ConcurrentHashMap, so concurrent first
    // observations of the same exception class share one counter; the old
    // get/putIfAbsent dance could create throwaway Counter instances.
    Counter counter =
        exceptionCounters.computeIfAbsent(exceptionName, exceptionStatLogger::getCounter);
    counter.inc();
    streamExceptionLogger.getCounter(exceptionName).inc();
}
/**
 * Whether a write failure should be treated as critical (logged at error
 * level): ownership transfer (OwnershipAcquireFailedException) is expected
 * operational churn and is not critical.
 */
boolean isCriticalException(Throwable cause) {
    if (cause instanceof OwnershipAcquireFailedException) {
        return false;
    }
    return true;
}
//
// Service Timeout:
// - schedule a timeout function to handle operation timeouts: {@link #handleServiceTimeout(String)}
// - if the operation is completed within timeout period, cancel the timeout.
//
void scheduleTimeout(final StreamOp op) {
    // Arm a one-shot timer that puts the whole stream into error/probation
    // (not just failing this op) if the op does not finish within serviceTimeoutMs.
    final Timeout timeout = requestTimer.newTimeout(new TimerTask() {
        @Override
        public void run(Timeout timeout) throws Exception {
            if (!timeout.isCancelled()) {
                serviceTimeout.inc();
                handleServiceTimeout("Operation " + op.getClass().getName() + " timeout");
            }
        }
    }, serviceTimeoutMs, TimeUnit.MILLISECONDS);
    // Cancel the timer as soon as the op's response is set, success or failure.
    op.responseHeader().ensure(new Function0<BoxedUnit>() {
        @Override
        public BoxedUnit apply() {
            timeout.cancel();
            return null;
        }
    });
}
/**
* Close the stream and schedule cache eviction at some point in the future.
* We delay this as a way to place the stream in a probationary state--cached
* in the proxy but unusable.
* This mechanism helps the cluster adapt to situations where a proxy has
* persistent connectivity/availability issues, because it keeps an affected
* stream off the proxy for a period of time, hopefully long enough for the
* issues to be resolved, or for whoop to kick in and kill the shard.
*/
void handleServiceTimeout(String reason) {
    synchronized (this) {
        if (StreamStatus.isUnavailable(status)) {
            return; // already closing/closed/errored; nothing to do
        }
        // Mark stream in error state
        setStreamInErrorStatus();
    }
    // Async close request, and schedule eviction when its done.
    Future<Void> closeFuture = requestClose(reason, false /* dont remove */);
    closeFuture.onSuccess(new AbstractFunction1<Void, BoxedUnit>() {
        @Override
        public BoxedUnit apply(Void result) {
            // Keep the stream cached-but-unusable for the probation period.
            streamManager.scheduleRemoval(StreamImpl.this, streamProbationTimeoutMs);
            return BoxedUnit.UNIT;
        }
    });
}
//
// Submit the operation to the stream.
//
/**
* Execute the StreamOp. If reacquire is needed, this may initiate reacquire and queue the op for
* execution once complete.
*
* @param op
* stream operation to execute.
*/
@Override
public void submit(StreamOp op) {
    // Apply per-stream request limiting before doing anything else.
    try {
        limiter.apply(op);
    } catch (OverCapacityException ex) {
        op.fail(ex);
        return;
    }
    // Timeout stream op if requested.
    if (serviceTimeoutMs > 0) {
        scheduleTimeout(op);
    }
    boolean completeOpNow = false;
    boolean success = true;
    // Fast path: read status/writer without the lock; re-check under
    // synchronization before queueing, since status may change in between.
    if (StreamStatus.isUnavailable(status)) {
        // Stream is closed, fail the op immediately
        op.fail(new StreamUnavailableException("Stream " + name + " is closed."));
        return;
    } else if (StreamStatus.INITIALIZED == status && writer != null) {
        completeOpNow = true;
        success = true;
    } else {
        synchronized (this) {
            if (StreamStatus.isUnavailable(status)) {
                // Stream is closed, fail the op immediately
                op.fail(new StreamUnavailableException("Stream " + name + " is closed."));
                return;
            } else if (StreamStatus.INITIALIZED == status) {
                completeOpNow = true;
                success = true;
            } else if (failFastOnStreamNotReady) {
                op.fail(new StreamNotReadyException("Stream " + name + " is not ready; status = " + status));
                return;
            } else { // the stream is still initializing
                pendingOps.add(op);
                pendingOpsCounter.inc();
                if (1 == pendingOps.size()) {
                    // First queued op: a heartbeat is asked to write a control
                    // record (see HeartbeatOp).
                    if (op instanceof HeartbeatOp) {
                        ((HeartbeatOp) op).setWriteControlRecord(true);
                    }
                }
            }
        }
    }
    if (completeOpNow) {
        executeOp(op, success);
    }
}
//
// Execute operations and handle exceptions on operations
//
/**
* Execute the <i>op</i> immediately.
*
* @param op
* stream operation to execute.
* @param success
* whether the operation is success or not.
*/
void executeOp(final StreamOp op, boolean success) {
    final AsyncLogWriter writer;
    final Throwable lastException;
    // Snapshot writer/lastException together so the op sees a consistent pair.
    synchronized (this) {
        writer = this.writer;
        lastException = this.lastException;
    }
    if (null != writer && success) {
        op.execute(writer, sequencer, txnLock)
            .addEventListener(new FutureEventListener<Void>() {
                @Override
                public void onSuccess(Void value) {
                    // nop
                }

                @Override
                public void onFailure(Throwable cause) {
                    boolean countAsException = true;
                    if (cause instanceof DLException) {
                        final DLException dle = (DLException) cause;
                        switch (dle.getCode()) {
                            case StatusCode.FOUND:
                                // Ownership moved elsewhere: expected churn,
                                // not counted in exception stats.
                                assert(cause instanceof OwnershipAcquireFailedException);
                                countAsException = false;
                                handleExceptionOnStreamOp(op, cause);
                                break;
                            case StatusCode.ALREADY_CLOSED:
                                assert(cause instanceof AlreadyClosedException);
                                op.fail(cause);
                                handleAlreadyClosedException((AlreadyClosedException) cause);
                                break;
                            // exceptions that mostly from client (e.g. too large record)
                            case StatusCode.NOT_IMPLEMENTED:
                            case StatusCode.METADATA_EXCEPTION:
                            case StatusCode.LOG_EMPTY:
                            case StatusCode.LOG_NOT_FOUND:
                            case StatusCode.TRUNCATED_TRANSACTION:
                            case StatusCode.END_OF_STREAM:
                            case StatusCode.TRANSACTION_OUT_OF_ORDER:
                            case StatusCode.INVALID_STREAM_NAME:
                            case StatusCode.TOO_LARGE_RECORD:
                            case StatusCode.STREAM_NOT_READY:
                            case StatusCode.OVER_CAPACITY:
                                // Fail just this op; the stream stays usable.
                                op.fail(cause);
                                break;
                            // the DL writer hits exception, simple set the stream to error status
                            // and fail the request
                            default:
                                handleExceptionOnStreamOp(op, cause);
                                break;
                        }
                    } else {
                        handleExceptionOnStreamOp(op, cause);
                    }
                    if (countAsException) {
                        countException(cause, streamExceptionStatLogger);
                    }
                }
            });
    } else {
        if (null != lastException) {
            op.fail(lastException);
        } else {
            op.fail(new StreamUnavailableException("Stream " + name + " is closed."));
        }
    }
}
/**
* Handle exception when executing <i>op</i>.
*
* @param op
* stream operation executing
* @param cause
* exception received when executing <i>op</i>
*/
private void handleExceptionOnStreamOp(StreamOp op, final Throwable cause) {
    AsyncLogWriter oldWriter = null;
    boolean statusChanged = false;
    synchronized (this) {
        // Only the first failure transitions INITIALIZED -> ERROR; subsequent
        // failures just fail their op below.
        if (StreamStatus.INITIALIZED == status) {
            oldWriter = setStreamStatus(StreamStatus.ERROR, StreamStatus.INITIALIZED, null, cause);
            statusChanged = true;
        }
    }
    if (statusChanged) {
        // Abort (rather than close) the old writer since the stream is errored.
        Abortables.asyncAbort(oldWriter, false);
        if (isCriticalException(cause)) {
            logger.error("Failed to write data into stream {} : ", name, cause);
        } else {
            logger.warn("Failed to write data into stream {} : {}", name, cause.getMessage());
        }
        requestClose("Failed to write data into stream " + name + " : " + cause.getMessage());
    }
    op.fail(cause);
}
/**
 * Handles {@link AlreadyClosedException}: the writer reports it has already
 * been closed underneath us, which is unexpected here, so bump the
 * unexpected-exception counter and escalate to the fatal error handler.
 */
private void handleAlreadyClosedException(AlreadyClosedException ace) {
    logger.error("Encountered unexpected exception when writing data into stream {} : ", name, ace);
    unexpectedExceptions.inc();
    fatalErrorHandler.notifyFatalError();
}
    //
    // Acquire streams
    //

    /**
     * Asynchronously acquire the underlying log writer for this stream.
     *
     * <p>Opens an {@link AsyncLogWriter} through the log manager; completion is
     * dispatched on the executor chosen for this stream name so that all state
     * changes for a given stream run on the same executor.
     *
     * @return future satisfied with whether the acquisition succeeded
     */
    Future<Boolean> acquireStream() {
        // Stopwatch records acquisition latency into streamAcquireStat.
        final Stopwatch stopwatch = Stopwatch.createStarted();
        final Promise<Boolean> acquirePromise = new Promise<Boolean>();
        manager.openAsyncLogWriter().whenCompleteAsync(
            new org.apache.distributedlog.common.concurrent.FutureEventListener<AsyncLogWriter>() {
                @Override
                public void onSuccess(AsyncLogWriter w) {
                    onAcquireStreamSuccess(w, stopwatch, acquirePromise);
                }
                @Override
                public void onFailure(Throwable cause) {
                    onAcquireStreamFailure(cause, stopwatch, acquirePromise);
                }
            }, scheduler.chooseExecutor(getStreamName()));
        return acquirePromise;
    }
    /**
     * Completion path for a successful writer acquisition.
     *
     * <p>Seeds the transaction-id sequencer from the writer, transitions the
     * stream to INITIALIZED, and swaps out the pending-op queue. If the stream
     * manager refuses the acquisition (per-proxy partition limit), the stream is
     * flipped back to ERROR and the queued requests are failed instead.
     */
    private void onAcquireStreamSuccess(AsyncLogWriter w,
                                        Stopwatch stopwatch,
                                        Promise<Boolean> acquirePromise) {
        synchronized (txnLock) {
            // Continue transaction ids monotonically from the writer's last txid.
            sequencer.setLastId(w.getLastTxId());
        }
        AsyncLogWriter oldWriter;
        Queue<StreamOp> oldPendingOps;
        boolean success;
        synchronized (StreamImpl.this) {
            oldWriter = setStreamStatus(StreamStatus.INITIALIZED,
                    StreamStatus.INITIALIZING, w, null);
            // Swap the pending queue so ops queued during acquisition can be replayed.
            oldPendingOps = pendingOps;
            pendingOps = new ArrayDeque<StreamOp>();
            success = true;
        }
        // check if the stream is allowed to be acquired
        if (!streamManager.allowAcquire(StreamImpl.this)) {
            if (null != oldWriter) {
                Abortables.asyncAbort(oldWriter, true);
            }
            int maxAcquiredPartitions = dynConf.getMaxAcquiredPartitionsPerProxy();
            StreamUnavailableException sue = new StreamUnavailableException("Stream " + partition.getStream()
                    + " is not allowed to acquire more than " + maxAcquiredPartitions + " partitions");
            countException(sue, exceptionStatLogger);
            logger.error("Failed to acquire stream {} because it is unavailable : {}",
                    name, sue.getMessage());
            synchronized (this) {
                // Release the just-acquired writer via the ERROR transition; the
                // returned writer is aborted in processPendingRequestsAfterAcquire.
                oldWriter = setStreamStatus(StreamStatus.ERROR,
                        StreamStatus.INITIALIZED, null, sue);
                // we don't switch the pending ops since they are already switched
                // when setting the status to initialized
                success = false;
            }
        }
        processPendingRequestsAfterAcquire(success, oldWriter, oldPendingOps, stopwatch, acquirePromise);
    }
    /**
     * Completion path for a failed writer acquisition.
     *
     * <p>An {@link AlreadyClosedException} is treated as fatal (proxy shuts down);
     * any other failure moves the stream from INITIALIZING to ERROR and fails the
     * requests queued during acquisition.
     */
    private void onAcquireStreamFailure(Throwable cause,
                                        Stopwatch stopwatch,
                                        Promise<Boolean> acquirePromise) {
        AsyncLogWriter oldWriter;
        Queue<StreamOp> oldPendingOps;
        boolean success;
        if (cause instanceof AlreadyClosedException) {
            countException(cause, streamExceptionStatLogger);
            handleAlreadyClosedException((AlreadyClosedException) cause);
            // NOTE(review): acquirePromise is never completed on this path —
            // presumably acceptable because the proxy is shutting down; confirm.
            return;
        } else {
            if (isCriticalException(cause)) {
                countException(cause, streamExceptionStatLogger);
                logger.error("Failed to acquire stream {} : ", name, cause);
            } else {
                logger.warn("Failed to acquire stream {} : {}", name, cause.getMessage());
            }
            synchronized (StreamImpl.this) {
                oldWriter = setStreamStatus(StreamStatus.ERROR,
                        StreamStatus.INITIALIZING, null, cause);
                // Swap the pending queue so queued ops can be failed below.
                oldPendingOps = pendingOps;
                pendingOps = new ArrayDeque<StreamOp>();
                success = false;
            }
        }
        processPendingRequestsAfterAcquire(success, oldWriter, oldPendingOps, stopwatch, acquirePromise);
    }
/**
* Process the pending request after acquired stream.
*
* @param success whether the acquisition succeed or not
* @param oldWriter the old writer to abort
* @param oldPendingOps the old pending ops to execute
* @param stopwatch stopwatch to measure the time spent on acquisition
* @param acquirePromise the promise to complete the acquire operation
*/
void processPendingRequestsAfterAcquire(boolean success,
AsyncLogWriter oldWriter,
Queue<StreamOp> oldPendingOps,
Stopwatch stopwatch,
Promise<Boolean> acquirePromise) {
if (success) {
streamAcquireStat.registerSuccessfulEvent(
stopwatch.elapsed(TimeUnit.MICROSECONDS), TimeUnit.MICROSECONDS);
} else {
streamAcquireStat.registerFailedEvent(
stopwatch.elapsed(TimeUnit.MICROSECONDS), TimeUnit.MICROSECONDS);
}
for (StreamOp op : oldPendingOps) {
executeOp(op, success);
pendingOpsCounter.dec();
}
Abortables.asyncAbort(oldWriter, true);
TwitterFutureUtils.setValue(acquirePromise, success);
}
//
// Stream Status Changes
//
synchronized void setStreamInErrorStatus() {
if (StreamStatus.CLOSING == status || StreamStatus.CLOSED == status) {
return;
}
this.status = StreamStatus.ERROR;
}
    /**
     * Update the stream status. The change is only applied when the current status
     * still matches <i>oldStatus</i> (a compare-and-set under the stream lock).
     *
     * @param newStatus
     *          new status
     * @param oldStatus
     *          old status
     * @param writer
     *          new log writer
     * @param t
     *          new exception
     * @return old writer if it exists
     */
    synchronized AsyncLogWriter setStreamStatus(StreamStatus newStatus,
                                                StreamStatus oldStatus,
                                                AsyncLogWriter writer,
                                                Throwable t) {
        // CAS guard: someone else already transitioned the stream, do nothing.
        if (oldStatus != this.status) {
            logger.info("Stream {} status already changed from {} -> {} when trying to change it to {}",
                new Object[] { name, oldStatus, this.status, newStatus });
            return null;
        }

        // Extract the current lock owner when the failure was an ownership loss.
        String owner = null;
        if (t instanceof OwnershipAcquireFailedException) {
            owner = ((OwnershipAcquireFailedException) t).getCurrentOwner();
        }

        AsyncLogWriter oldWriter = this.writer;
        this.writer = writer;
        if (null != owner && owner.equals(clientId)) {
            unexpectedExceptions.inc();
            logger.error("I am waiting myself {} to release lock on stream {}, so have to shut myself down :",
                new Object[] { owner, name, t });
            // I lost the ownership but left a lock over zookeeper
            // I should not ask client to redirect to myself again as I can't handle it :(
            // shutdown myself
            fatalErrorHandler.notifyFatalError();
            this.owner = null;
        } else {
            this.owner = owner;
        }
        this.lastException = t;
        this.status = newStatus;
        // Keep the stream manager's acquired-set in sync with the new status.
        if (StreamStatus.INITIALIZED == newStatus) {
            streamManager.notifyAcquired(this);
            logger.info("Inserted acquired stream {} -> writer {}", name, this);
        } else {
            streamManager.notifyReleased(this);
            logger.info("Removed acquired stream {} -> writer {}", name, this);
        }
        return oldWriter;
    }
//
// Stream Close Functions
//
void close(DistributedLogManager dlm) {
if (null != dlm) {
try {
dlm.close();
} catch (IOException ioe) {
logger.warn("Failed to close dlm for {} : ", name, ioe);
}
}
}
    /**
     * Request to close this stream and remove it from the stream cache.
     *
     * @param reason human-readable reason recorded in the log
     * @return future satisfied once the stream is fully closed
     */
    @Override
    public Future<Void> requestClose(String reason) {
        return requestClose(reason, true);
    }

    /**
     * Request to close this stream.
     *
     * @param reason human-readable reason recorded in the log
     * @param uncache whether to also remove the stream from the cache once closed
     * @return future satisfied once the stream is fully closed
     */
    Future<Void> requestClose(String reason, boolean uncache) {
        final boolean abort;
        closeLock.writeLock().lock();
        try {
            // Idempotent: if a close is already in flight, return its promise.
            if (StreamStatus.CLOSING == status
                || StreamStatus.CLOSED == status) {
                return closePromise;
            }
            logger.info("Request to close stream {} : {}", getStreamName(), reason);
            // if the stream isn't closed from INITIALIZED state, we abort the stream instead of closing it.
            abort = StreamStatus.INITIALIZED != status;
            status = StreamStatus.CLOSING;
            streamManager.notifyReleased(this);
        } finally {
            closeLock.writeLock().unlock();
        }
        // we will fail the requests that are coming in between closing and closed only
        // after the async writer is closed. so we could clear up the lock before redirect
        // them.
        close(abort, uncache);
        return closePromise;
    }
    /**
     * Delete the underlying log stream.
     *
     * <p>Closes the current writer (if any), marks the stream unavailable, then
     * deletes the log through the manager.
     *
     * <p>NOTE(review): {@code writer} is read and closed here without holding the
     * stream lock, so a concurrent status change could swap it mid-delete —
     * verify callers serialize delete() with acquisition.
     *
     * @throws IOException if there is no manager to delete through, or the
     *         deletion itself fails
     */
    @Override
    public void delete() throws IOException {
        if (null != writer) {
            Utils.close(writer);
            synchronized (this) {
                writer = null;
                lastException = new StreamUnavailableException("Stream was deleted");
            }
        }
        if (null == manager) {
            throw new UnexpectedException("No stream " + name + " to delete");
        }
        manager.delete();
    }
    /**
     * Post action executed after closing.
     *
     * <p>Closes the manager, fails any remaining pending requests, unregisters the
     * status gauge and (optionally) removes the stream from the cache. When the
     * lock was lost to another owner, removal is delayed by a probation period so
     * redirects keep working while the lock settles.
     */
    private void postClose(boolean uncache) {
        closeManagerAndErrorOutPendingRequests();
        unregisterGauge();
        if (uncache) {
            if (null != owner) {
                // Probation window is 2/3 of the ZK session timeout.
                long probationTimeoutMs = 2 * dlConfig.getZKSessionTimeoutMilliseconds() / 3;
                streamManager.scheduleRemoval(this, probationTimeoutMs);
            } else {
                streamManager.notifyRemoved(this);
                logger.info("Removed cached stream {}.", getStreamName());
            }
        }
        TwitterFutureUtils.setValue(closePromise, null);
    }
    /**
     * Shouldn't call close directly. The callers should call #requestClose instead.
     *
     * <p>Transitions the stream to CLOSED, closes (or aborts) the writer with a
     * bounded wait, then runs {@link #postClose(boolean)}.
     *
     * @param shouldAbort shall we abort the stream instead of closing
     * @param uncache whether to remove the stream from the cache after closing
     */
    private Future<Void> close(boolean shouldAbort, final boolean uncache) {
        boolean abort;
        closeLock.writeLock().lock();
        try {
            // Idempotent: already closed, just hand back the promise.
            if (StreamStatus.CLOSED == status) {
                return closePromise;
            }
            // Abort if asked to, or if the stream never reached a clean state.
            abort = shouldAbort || (StreamStatus.INITIALIZED != status && StreamStatus.CLOSING != status);
            status = StreamStatus.CLOSED;
            streamManager.notifyReleased(this);
        } finally {
            closeLock.writeLock().unlock();
        }
        logger.info("Closing stream {} ...", name);
        // Close the writers to release the locks before failing the requests
        CompletableFuture<Void> closeWriterFuture;
        if (abort) {
            closeWriterFuture = Abortables.asyncAbort(writer, true);
        } else {
            closeWriterFuture = Utils.asyncClose(writer, true);
        }
        // close the manager and error out pending requests after close writer
        Duration closeWaitDuration;
        if (writerCloseTimeoutMs <= 0) {
            // Non-positive timeout means wait indefinitely.
            closeWaitDuration = Duration.Top();
        } else {
            closeWaitDuration = Duration.fromMilliseconds(writerCloseTimeoutMs);
        }

        // Mask the writer-close future so stats are recorded exactly once even
        // if the timeout below fires.
        CompletableFuture<Void> maskedFuture = FutureUtils.createFuture();
        FutureUtils.proxyTo(
            FutureUtils.stats(
                closeWriterFuture,
                writerCloseStatLogger,
                Stopwatch.createStarted()
            ),
            maskedFuture);

        FutureUtils.within(
            maskedFuture,
            closeWaitDuration.inMillis(),
            TimeUnit.MILLISECONDS,
            new java.util.concurrent.TimeoutException("Timeout on closing"),
            scheduler,
            name
        ).whenCompleteAsync(
            new org.apache.distributedlog.common.concurrent.FutureEventListener<Void>() {
                @Override
                public void onSuccess(Void value) {
                    postClose(uncache);
                }
                @Override
                public void onFailure(Throwable cause) {
                    // NOTE(review): on timeout only the counter is bumped and
                    // postClose is never invoked on this path — confirm pending
                    // requests are still failed elsewhere in that case.
                    if (cause instanceof java.util.concurrent.TimeoutException) {
                        writerCloseTimeoutCounter.inc();
                    }
                }
            },
            scheduler.chooseExecutor(name)
        );
        return closePromise;
    }
private void closeManagerAndErrorOutPendingRequests() {
close(manager);
// Failed the pending requests.
Queue<StreamOp> oldPendingOps;
synchronized (this) {
oldPendingOps = pendingOps;
pendingOps = new ArrayDeque<StreamOp>();
}
StreamUnavailableException closingException =
new StreamUnavailableException("Stream " + name + " is closed.");
for (StreamOp op : oldPendingOps) {
op.fail(closingException);
pendingOpsCounter.dec();
}
limiter.close();
logger.info("Closed stream {}.", name);
}
    /**
     * Clean up the status gauge registration to help GC.
     */
    private void unregisterGauge(){
        streamLogger.unregisterGauge("stream_status", this.streamStatusGauge);
    }

    // Test-only apis

    /** Number of operations currently queued waiting for the stream. */
    @VisibleForTesting
    public int numPendingOps() {
        // Snapshot the reference once; pendingOps may be swapped concurrently.
        Queue<StreamOp> queue = pendingOps;
        return null == queue ? 0 : queue.size();
    }

    /** Current stream status (test hook). */
    @VisibleForTesting
    public StreamStatus getStatus() {
        return status;
    }

    /** Test hook: force the stream status without any transition checks. */
    @VisibleForTesting
    public void setStatus(StreamStatus status) {
        this.status = status;
    }

    /** Current async log writer, if acquired (test hook). */
    @VisibleForTesting
    public AsyncLogWriter getWriter() {
        return writer;
    }

    /** Underlying log manager (test hook). */
    @VisibleForTesting
    public DistributedLogManager getManager() {
        return manager;
    }

    /** Last exception recorded by a status transition (test hook). */
    @VisibleForTesting
    public Throwable getLastException() {
        return lastException;
    }

    /** Promise satisfied when the stream is fully closed (test hook). */
    @VisibleForTesting
    public Future<Void> getCloseFuture() {
        return closePromise;
    }
}
|
apache/geode | 36,797 | geode-core/src/main/java/org/apache/geode/internal/cache/tier/sockets/ClientUpdateMessageImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier.sockets;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.apache.geode.DataSerializer;
import org.apache.geode.GemFireIOException;
import org.apache.geode.InternalGemFireError;
import org.apache.geode.annotations.VisibleForTesting;
import org.apache.geode.cache.query.internal.cq.InternalCqQuery;
import org.apache.geode.cache.util.ObjectSizer;
import org.apache.geode.internal.InternalDataSerializer;
import org.apache.geode.internal.Sendable;
import org.apache.geode.internal.cache.CachedDeserializableFactory;
import org.apache.geode.internal.cache.EntryEventImpl.NewValueImporter;
import org.apache.geode.internal.cache.EnumListenerEvent;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.WrappedCallbackArgument;
import org.apache.geode.internal.cache.ha.HAContainerRegion;
import org.apache.geode.internal.cache.tier.MessageType;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.serialization.ByteArrayDataInput;
import org.apache.geode.internal.serialization.DSCODE;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.KnownVersion;
import org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.internal.size.Sizeable;
import org.apache.geode.logging.internal.log4j.api.LogService;
/**
 * Class {@code ClientUpdateMessageImpl} is a message representing a cache operation that is
 * sent from a server to an interested client.
 *
 * @since GemFire 4.2
 */
public class ClientUpdateMessageImpl implements ClientUpdateMessage, Sizeable, NewValueImporter {
  // Serialization id; do not change, instances cross the wire between members.
  private static final long serialVersionUID = 7037106666445312400L;
  private static final Logger logger = LogService.getLogger();

  /**
   * The operation performed (e.g. AFTER_CREATE, AFTER_UPDATE, AFTER_DESTROY, AFTER_INVALIDATE,
   * AFTER_REGION_DESTROY)
   */
  EnumListenerEvent _operation;

  /**
   * The name of the <code>Region</code> that was updated
   */
  private String _regionName;

  /**
   * The key that was updated
   */
  private Object _keyOfInterest;

  /**
   * The new value
   */
  private Object _value;

  /**
   * Whether the value is a serialized object (0x01) or just a byte[]
   */
  private byte _valueIsObject;

  /**
   * The callback argument
   */
  private Object _callbackArgument;

  /**
   * The membership id of the originator of the event
   */
  ClientProxyMembershipID _membershipId;

  /**
   * The event id of the event
   */
  EventID _eventIdentifier;

  // True only for AFTER_UPDATE events on regions with conflation enabled.
  private boolean _shouldConflate = false;

  /**
   * To determine if this client message is part of InterestList.
   */
  private volatile boolean _isInterestListPassed;

  /**
   * To determine if this client message is part of CQs.
   */
  private volatile boolean _hasCqs = false;

  /**
   * Map containing clientId and the cqs satisfied for the client.
   */
  private ClientCqConcurrentMap _clientCqs = null;

  /**
   * Client list satisfying the interestList who want values
   */
  private volatile Set<ClientProxyMembershipID> _clientInterestList;

  /**
   * Client list satisfying the interestList who want invalidations
   */
  private volatile Set<ClientProxyMembershipID> _clientInterestListInv;

  /**
   * To determine if the message is result of netLoad. If its net load the message is not delivered
   * to the client that has requested the load.
   */
  private transient boolean _isNetLoad = false;

  /**
   * Represents the changed bytes of this event's _value.
   *
   * @since GemFire 6.1
   */
  private byte[] deltaBytes = null;

  // Concurrent-modification version tag carried along with the event.
  private VersionTag<?> versionTag;

  /* added up all constants and form single value */
  // NOTE: initialized in a static block outside this view.
  private static final int CONSTANT_MEMORY_OVERHEAD;
  /**
   * Constructor.
   *
   * @param operation The operation performed (e.g. AFTER_CREATE, AFTER_UPDATE, AFTER_DESTROY,
   *        AFTER_INVALIDATE, AFTER_REGION_DESTROY)
   * @param region The <code>Region</code> that was updated
   * @param keyOfInterest The key that was updated
   * @param value The new value
   * @param valueIsObject false if value is an actual byte[] that isn't serialized info
   * @param callbackArgument The callback argument
   * @param memberId membership id of the originator of the event
   */
  public ClientUpdateMessageImpl(EnumListenerEvent operation, InternalRegion region,
      Object keyOfInterest, Object value, byte valueIsObject, Object callbackArgument,
      ClientProxyMembershipID memberId, EventID eventIdentifier) {
    this(operation, region, keyOfInterest, value, null, valueIsObject, callbackArgument, memberId,
        eventIdentifier, null);
  }

  /**
   * Full constructor, additionally carrying the delta bytes and the version tag
   * of the originating event.
   *
   * @param operation The operation performed
   * @param region The <code>Region</code> that was updated
   * @param keyOfInterest The key that was updated
   * @param value The new value
   * @param delta the changed bytes of the value, or null when no delta applies
   * @param valueIsObject false if value is an actual byte[] that isn't serialized info
   * @param callbackArgument The callback argument
   * @param memberId membership id of the originator of the event
   * @param eventIdentifier EventID of this message
   * @param versionTag version information for the originating event, may be null
   */
  public ClientUpdateMessageImpl(EnumListenerEvent operation, InternalRegion region,
      Object keyOfInterest, Object value, byte[] delta, byte valueIsObject, Object callbackArgument,
      ClientProxyMembershipID memberId, EventID eventIdentifier, VersionTag<?> versionTag) {
    _operation = operation;
    _regionName = region.getFullPath();
    _keyOfInterest = keyOfInterest;
    _value = value;
    _valueIsObject = valueIsObject;
    _callbackArgument = callbackArgument;
    _membershipId = memberId;
    _eventIdentifier = eventIdentifier;
    // Only updates on conflation-enabled regions are ever conflated.
    _shouldConflate = isUpdate() && region.getEnableConflation();
    deltaBytes = delta;
    this.versionTag = versionTag;
  }

  /**
   * Constructor used by ClientInstantiatorMessage
   *
   * @param operation The operation performed (e.g. AFTER_CREATE, AFTER_UPDATE, AFTER_DESTROY,
   *        AFTER_INVALIDATE, AFTER_REGION_DESTROY)
   * @param memberId membership id of the originator of the event
   * @param eventIdentifier EventID of this message
   */
  protected ClientUpdateMessageImpl(EnumListenerEvent operation, ClientProxyMembershipID memberId,
      EventID eventIdentifier) {
    _operation = operation;
    _membershipId = memberId;
    _eventIdentifier = eventIdentifier;
  }

  /**
   * Default constructor, used by deserialization.
   */
  public ClientUpdateMessageImpl() {

  }
  /** Name of the region the event applies to. */
  @Override
  public String getRegionName() {
    return _regionName;
  }

  /** The key that was updated. */
  @Override
  public Object getKeyOfInterest() {
    return _keyOfInterest;
  }

  /** The listener event describing the operation performed. */
  @Override
  public EnumListenerEvent getOperation() {
    return _operation;
  }

  /** The new value carried by this message. */
  @Override
  public Object getValue() {
    return _value;
  }

  /** Whether the value bytes represent a serialized object (flag byte 0x01). */
  @Override
  public boolean valueIsObject() {
    return (_valueIsObject == 0x01);
  }

  /**
   * @return the callback argument
   */
  public Object getCallbackArgument() {
    return _callbackArgument;
  }
  /// Conflatable interface methods ///

  /**
   * Determines whether or not to conflate this message. This method will answer true IFF the
   * message's operation is AFTER_UPDATE and its region has conflation enabled. Otherwise, this
   * method will answer false. Messages whose operation is AFTER_CREATE, AFTER_DESTROY,
   * AFTER_INVALIDATE or AFTER_REGION_DESTROY are not conflated.
   *
   * @return Whether to conflate this message
   */
  @Override
  public boolean shouldBeConflated() {
    // If the message is an update, it may be conflatable. If it is a
    // create, destroy, invalidate or destroy-region, it is not conflatable.
    // Only updates are conflated. If it is an update, then verify that
    // the region has conflation enabled.
    return _shouldConflate;
  }

  /** Region name used as the conflation namespace. */
  @Override
  public String getRegionToConflate() {
    return _regionName;
  }

  /** Key used to identify the entry being conflated. */
  @Override
  public Object getKeyToConflate() {
    return _keyOfInterest;
  }

  /** Value to retain when this message survives conflation. */
  @Override
  public Object getValueToConflate() {
    return _value;
  }

  @Override
  public void setLatestValue(Object value) {
    // NOTE(review): does this also need to set _valueIsObject? confirm callers
    // only pass values consistent with the existing flag.
    _value = value;
  }

  /// End Conflatable interface methods ///
  /** Membership id of the member that originated the event. */
  @Override
  public ClientProxyMembershipID getMembershipId() {
    return _membershipId;
  }

  /**
   * Returns the unique event identifier for the event corresponding to this message.
   *
   * @return the unique event identifier for the event corresponding to this message.
   */
  @Override
  public EventID getEventId() {
    return _eventIdentifier;
  }

  /** Version tag of the originating event, may be null. */
  @Override
  public VersionTag<?> getVersionTag() {
    return versionTag;
  }

  /** True when the operation is AFTER_CREATE. */
  @Override
  public boolean isCreate() {
    return _operation == EnumListenerEvent.AFTER_CREATE;
  }

  /** True when the operation is AFTER_UPDATE. */
  @Override
  public boolean isUpdate() {
    return _operation == EnumListenerEvent.AFTER_UPDATE;
  }

  /** True when the operation is AFTER_DESTROY. */
  @Override
  public boolean isDestroy() {
    return _operation == EnumListenerEvent.AFTER_DESTROY;
  }

  /** True when the operation is AFTER_INVALIDATE. */
  @Override
  public boolean isInvalidate() {
    return _operation == EnumListenerEvent.AFTER_INVALIDATE;
  }

  /** True when the operation is AFTER_REGION_DESTROY. */
  @Override
  public boolean isDestroyRegion() {
    return _operation == EnumListenerEvent.AFTER_REGION_DESTROY;
  }

  /** True when the operation is AFTER_REGION_CLEAR. */
  @Override
  public boolean isClearRegion() {
    return _operation == EnumListenerEvent.AFTER_REGION_CLEAR;
  }

  /** True when the operation is AFTER_REGION_INVALIDATE. */
  private boolean isInvalidateRegion() {
    return _operation == EnumListenerEvent.AFTER_REGION_INVALIDATE;
  }

  /**
   * Unsupported for this class; dispatch goes through
   * {@link #getMessage(CacheClientProxy, byte[])} instead.
   */
  @Override
  public Message getMessage(CacheClientProxy proxy, boolean notify) throws IOException {
    // the MessageDispatcher uses getMessage(CacheClientProxy, byte[]) for this class
    throw new Error("ClientUpdateMessage.getMessage(proxy) should not be invoked");
  }
  /**
   * Returns a <code>Message</code> generated from the fields of this
   * <code>ClientUpdateMessage</code>.
   *
   * @param latestValue Object containing the latest value to use. This could be the original value
   *        if conflation is not enabled, or it could be a conflated value if conflation is enabled.
   * @return a <code>Message</code> generated from the fields of this
   *         <code>ClientUpdateMessage</code>
   * @see org.apache.geode.internal.cache.tier.sockets.Message
   */
  protected Message getMessage(CacheClientProxy proxy, byte[] latestValue) throws IOException {
    KnownVersion clientVersion = proxy.getVersion();
    byte[] serializedValue = null;
    boolean conflation;
    // Conflation applies when the client forces it on, or when it defaults and
    // this message is itself conflatable.
    conflation = (proxy.clientConflation == Handshake.CONFLATION_ON)
        || (proxy.clientConflation == Handshake.CONFLATION_DEFAULT && shouldBeConflated());

    if (latestValue != null) {
      serializedValue = latestValue;
    } else {
      /*
       * This means latestValue is instance of Delta, and its delta has already been extracted and
       * put into deltaBytes. We serialize the value.
       */
      if (deltaBytes == null || isCreate()) {
        // Delta could not be extracted. We would need to send full value.
        // OR A CREATE operation has a value which has delta. But we send full value for CREATE.
        // So serialize it.
        // NOTE(review): latestValue is null on this branch, so this serializes
        // null rather than _value — confirm this is the intended payload.
        _value = serializedValue = CacheServerHelper.serialize(latestValue);
      }
    }
    return getGFE70Message(proxy, serializedValue, conflation, clientVersion);
  }
  /**
   * Build the wire {@link Message} for a GFE 7.0+ client from this event.
   *
   * <p>The message layout (number of parts and their order) depends on the
   * operation type and on whether the target client has CQs registered for this
   * event; the part counts passed to {@code getMessage(...)} must stay in sync
   * with the parts added below.
   */
  private Message getGFE70Message(CacheClientProxy proxy, byte[] p_latestValue,
      boolean conflation, KnownVersion clientVersion) throws IOException {
    byte[] latestValue = p_latestValue;
    Message message;
    ClientProxyMembershipID proxyId = proxy.getProxyID();

    // Add CQ info.
    // Each CQ contributes a name part and an op part, plus one count part.
    int cqMsgParts = 0;
    boolean clientHasCq = _hasCqs && (getCqs(proxyId) != null);

    if (clientHasCq) {
      cqMsgParts = (getCqs(proxyId).length * 2) + 1;
    }

    if (isCreate() || isUpdate()) {
      // Create or update event
      if (_clientInterestListInv != null && _clientInterestListInv.contains(proxyId)) {
        // Client is registered for invalidates.
        if (cqMsgParts > 0) {
          cqMsgParts++; // To store base operation type for CQ.
        }

        message = getMessage(7 + cqMsgParts, clientVersion);
        message.setMessageType(MessageType.LOCAL_INVALIDATE);
        message.addStringPart(_regionName, true);
        message.addStringOrObjPart(_keyOfInterest);
      } else {
        // Notify by subscription - send the value
        message = getMessage(9 + cqMsgParts, clientVersion);

        if (isCreate()) {
          message.setMessageType(MessageType.LOCAL_CREATE);
          message.addStringPart(_regionName, true);
          message.addStringOrObjPart(_keyOfInterest);
          message.addObjPart(Boolean.FALSE); // NO delta
          // Add the value (which has already been serialized)
          message.addRawPart(latestValue, (_valueIsObject == 0x01));

        } else {
          message.setMessageType(MessageType.LOCAL_UPDATE);
          message.addStringPart(_regionName, true);
          message.addStringOrObjPart(_keyOfInterest);

          // Deltas are only sent when conflation is off, the client has seen the
          // marker, and the region does not use the empty data policy.
          if (deltaBytes != null && !conflation && !proxy.isMarkerEnqueued()
              && !proxy.getRegionsWithEmptyDataPolicy().containsKey(_regionName)) {
            message.addObjPart(Boolean.TRUE);
            message.addBytesPart(deltaBytes);
            proxy.getStatistics().incDeltaMessagesSent();
          } else {
            message.addObjPart(Boolean.FALSE);
            if (latestValue == null) {
              if (!(_value instanceof byte[])) {
                _value = CacheServerHelper.serialize(_value);
              }
              latestValue = (byte[]) _value;
            }
            // Add the value (which has already been serialized)
            message.addRawPart(latestValue, (_valueIsObject == 0x01));
          }
        }
      }

      message.addObjPart(_callbackArgument);
      if (versionTag != null) {
        versionTag.setCanonicalIDs(proxy.getCache().getDistributionManager());
      }
      message.addObjPart(versionTag);
      message.addObjPart(isClientInterested(proxyId));
      message.addObjPart(clientHasCq);

      if (clientHasCq) {
        if (message.getMessageType() == MessageType.LOCAL_INVALIDATE) {
          // in case of invalidate, set the region operation type.
          message
              .addIntPart(isCreate() ? MessageType.LOCAL_CREATE.id : MessageType.LOCAL_UPDATE.id);
        }
        addCqsToMessage(proxyId, message);
      }
    } else if (isDestroy() || isInvalidate()) {
      if (isDestroy()) {
        message = getMessage(7 + cqMsgParts, clientVersion);
        message.setMessageType(MessageType.LOCAL_DESTROY);
      } else {
        if (clientHasCq) {
          cqMsgParts++;/* To store the region operation for CQ */
        }
        message = getMessage(7 + cqMsgParts, clientVersion);
        message.setMessageType(MessageType.LOCAL_INVALIDATE);
      }

      message.addStringPart(_regionName, true);
      message.addStringOrObjPart(_keyOfInterest);
      message.addObjPart(_callbackArgument);
      message.addObjPart(versionTag);
      message.addObjPart(isClientInterested(proxyId));
      message.addObjPart(clientHasCq);

      if (clientHasCq) {
        if (isInvalidate()) {
          // This is to take care when invalidate message is getting sent
          // to the Client. See the code for create/update operation.
          message.addIntPart(MessageType.LOCAL_INVALIDATE.id);
        }
        addCqsToMessage(proxyId, message);
      }
    } else if (isDestroyRegion()) {
      message = getMessage(4 + cqMsgParts, clientVersion);
      message.setMessageType(MessageType.LOCAL_DESTROY_REGION);
      message.addStringPart(_regionName, true);
      message.addObjPart(_callbackArgument);
      message.addObjPart(clientHasCq);

      if (clientHasCq) {
        addCqsToMessage(proxyId, message);
      }
    } else if (isClearRegion()) {
      message = getMessage(4 + cqMsgParts, clientVersion);
      message.setMessageType(MessageType.CLEAR_REGION);
      message.addStringPart(_regionName, true);
      message.addObjPart(_callbackArgument);
      message.addObjPart(clientHasCq);

      if (clientHasCq) {
        addCqsToMessage(proxyId, message);
      }
    } else if (isInvalidateRegion()) {
      message = getMessage(4 + cqMsgParts, clientVersion);
      message.setMessageType(MessageType.INVALIDATE_REGION);
      message.addStringPart(_regionName, true);
      message.addObjPart(_callbackArgument);

      // Add CQ status.
      message.addObjPart(clientHasCq);

      if (clientHasCq) {
        addCqsToMessage(proxyId, message);
      }
    } else {
      throw new InternalGemFireError("Don't know what kind of message");
    }
    message.setTransactionId(0);
    // Add the EventId since 5.1 (used to prevent duplicate events
    // received on the client side after a failover)
    message.addObjPart(_eventIdentifier);
    return message;
  }
private static final ThreadLocal<Map<Integer, Message>> CACHED_MESSAGES =
ThreadLocal.withInitial(HashMap::new);
private Message getMessage(int numParts, KnownVersion clientVersion) {
Message m = CACHED_MESSAGES.get().get(numParts);
if (m == null) {
m = new Message(numParts, KnownVersion.CURRENT);
CACHED_MESSAGES.get().put(numParts, m);
}
m.clearParts();
m.setVersion(clientVersion);
return m;
}
  /**
   * @return boolean true if the event is due to net load.
   */
  @Override
  public boolean isNetLoad() {
    return _isNetLoad;
  }

  /**
   * @param isNetLoad boolean true if the event is due to net load.
   */
  @Override
  public void setIsNetLoad(boolean isNetLoad) {
    _isNetLoad = isNetLoad;
  }

  /**
   * @return boolean true if cq info is present for the given proxy.
   */
  @Override
  public boolean hasCqs(ClientProxyMembershipID clientId) {
    if (_clientCqs != null) {
      CqNameToOp cqs = _clientCqs.get(clientId);
      return cqs != null && !cqs.isEmpty();
    }
    return false;
  }

  /**
   * @return boolean true if cq info is present.
   */
  @Override
  public boolean hasCqs() {
    return _hasCqs;
  }

  /**
   * Returns the cqs for the given client.
   *
   * @return the CQ names registered for the client, or null when none exist
   */
  private String[] getCqs(ClientProxyMembershipID clientId) {
    String[] cqNames = null;
    if (_clientCqs != null) {
      CqNameToOp cqs = _clientCqs.get(clientId);

      if (cqs != null && !cqs.isEmpty()) {
        cqNames = cqs.getNames();
      }
    }
    return cqNames;
  }

  /** The full client-id to CQ map, may be null when no CQs are attached. */
  public ClientCqConcurrentMap getClientCqs() {
    return _clientCqs;
  }

  /**
   * Install the CQ map on first use, or merge in the entry for the given proxy
   * when a map is already present.
   */
  public void addOrSetClientCqs(ClientProxyMembershipID proxyID, ClientCqConcurrentMap clientCqs) {
    if (_clientCqs == null) {
      _clientCqs = clientCqs;
    } else {
      _clientCqs.put(proxyID, clientCqs.get(proxyID));
    }
  }
  /**
   * Record that the given CQ (with its operation) was satisfied for the given
   * client.
   *
   * <p>The per-client entry starts as a space-efficient single-entry holder and
   * is promoted to a hash map once it fills up. Synchronized so concurrent
   * dispatchers do not race on the map installation or the promotion.
   */
  synchronized void addClientCq(ClientProxyMembershipID clientId, String cqName,
      MessageType cqEvent) {
    if (_clientCqs == null) {
      _clientCqs = new ClientCqConcurrentMap();
      _hasCqs = true;
    }
    CqNameToOp cqInfo = _clientCqs.get(clientId);
    if (cqInfo == null) {
      // First CQ for this client: use the single-entry representation.
      cqInfo = new CqNameToOpSingleEntry(cqName, cqEvent);
      _clientCqs.put(clientId, cqInfo);
    } else if (!cqInfo.isFull()) {
      cqInfo.add(cqName, cqEvent);
    } else {
      // Single-entry holder is full: promote to a hash map and re-add.
      cqInfo = new CqNameToOpHashMap((CqNameToOpSingleEntry) cqInfo);
      cqInfo.add(cqName, cqEvent);
      _clientCqs.put(clientId, cqInfo);
    }
  }
private void addCqsToMessage(ClientProxyMembershipID proxyId, Message message) {
if (_clientCqs != null) {
CqNameToOp cqs = _clientCqs.get(proxyId);
if (cqs != null) {
message.addIntPart(cqs.size() * 2);
cqs.addToMessage(message);
}
}
}
public void removeClientCq(ClientProxyMembershipID clientId, InternalCqQuery cqToClose) {
CqNameToOp cqs = getClientCq(clientId);
if (cqs != null) {
cqs.delete(cqToClose.getName());
// remove clientId key if no more cqs exist for this clientId
if (cqs.isEmpty()) {
_clientCqs.remove(clientId);
}
}
}
  /**
   * Set the region name that was updated.
   */
  @Override
  public void setRegionName(String regionName) {
    _regionName = regionName;
  }

  /**
   * Set the event id, but only when none was deserialized yet.
   *
   * @see HAEventWrapper#fromData(DataInput, DeserializationContext)
   * @see HAContainerRegion#get(Object)
   */
  public void setEventIdentifier(EventID eventId) {
    if (_eventIdentifier == null) {
      _eventIdentifier = eventId;
    }
  }

  /**
   * Set the CQ map, but only when none was deserialized yet.
   *
   * @see HAEventWrapper#fromData(DataInput, DeserializationContext)
   * @see HAContainerRegion#get(Object)
   */
  public void setClientCqs(ClientCqConcurrentMap clientCqs) {
    if (_clientCqs == null) {
      _clientCqs = clientCqs;
    }
  }

  /**
   * Merge the given client ids into the value or invalidate interest list.
   *
   * <p>NOTE(review): when the current list is null, the caller's set is adopted
   * by reference (aliased) rather than copied — confirm callers never mutate
   * the passed-in set afterwards.
   */
  void addClientInterestList(Set<ClientProxyMembershipID> clientIds, boolean receiveValues) {
    if (receiveValues) {
      if (_clientInterestList == null) {
        _clientInterestList = clientIds;
      } else {
        _clientInterestList.addAll(clientIds);
      }
    } else {
      if (_clientInterestListInv == null) {
        _clientInterestListInv = clientIds;
      } else {
        _clientInterestListInv.addAll(clientIds);
      }
    }
  }
  /**
   * Add a single client to the value or invalidate interest list, lazily
   * creating the backing concurrent set.
   */
  public void addClientInterestList(ClientProxyMembershipID clientId, boolean receiveValues) {
    // This happens under synchronization on HAContainer.
    if (receiveValues) {
      if (_clientInterestList == null) {
        _clientInterestList = ConcurrentHashMap.newKeySet();
      }
      _clientInterestList.add(clientId);
    } else {
      if (_clientInterestListInv == null) {
        _clientInterestListInv = ConcurrentHashMap.newKeySet();
      }
      _clientInterestListInv.add(clientId);
    }
  }

  /** True when the client is on either interest list (values or invalidates). */
  @Override
  public boolean isClientInterested(ClientProxyMembershipID clientId) {
    return (_clientInterestList != null && _clientInterestList.contains(clientId))
        || (_clientInterestListInv != null && _clientInterestListInv.contains(clientId));
  }

  /** True when the client registered interest in receiving values. */
  public boolean isClientInterestedInUpdates(ClientProxyMembershipID clientId) {
    return (_clientInterestList != null && _clientInterestList.contains(clientId));
  }

  /** True when the client registered interest in receiving invalidations. */
  public boolean isClientInterestedInInvalidates(ClientProxyMembershipID clientId) {
    return (_clientInterestListInv != null && _clientInterestListInv.contains(clientId));
  }

  @VisibleForTesting
  boolean hasClientsInterestedInUpdates() {
    return _clientInterestList != null;
  }

  @VisibleForTesting
  boolean hasClientsInterestedInInvalidates() {
    return _clientInterestListInv != null;
  }
private Object deserialize(byte[] serializedBytes) {
Object deserializedObject = serializedBytes;
// This is a debugging method so ignore all exceptions like
// ClassNotFoundException
try (ByteArrayDataInput dis = new ByteArrayDataInput(serializedBytes)) {
deserializedObject = DataSerializer.readObject(dis);
} catch (Exception ignored) {
}
return deserializedObject;
}
  /**
   * Debug representation of this message; the value itself is only
   * deserialized and included at TRACE level since it can be large and costly.
   */
  @Override
  public String toString() {
    StringBuilder buffer = new StringBuilder();
    buffer.append("ClientUpdateMessageImpl[").append("op=").append(_operation)
        .append(";region=").append(_regionName).append(";key=").append(_keyOfInterest);
    if (logger.isTraceEnabled()) {
      buffer.append(";value=").append(
          (_value instanceof byte[]) ? deserialize((byte[]) _value) : _value);
    }
    buffer.append(";isObject=").append(_valueIsObject).append(";cbArg=")
        .append(_callbackArgument).append(";memberId=").append(_membershipId)
        .append(";eventId=").append(_eventIdentifier).append(";shouldConflate=")
        .append(_shouldConflate).append(";versionTag=").append(versionTag).append(";hasCqs=")
        .append(_hasCqs)
        // skip _logger :-)
        .append("]");
    return buffer.toString();
  }
/** Fixed data-serializable id identifying this message type on the wire. */
@Override
public int getDSFID() {
  return CLIENT_UPDATE_MESSAGE;
}
/**
 * Serializes this message. NOTE: the field order here must stay in lockstep
 * with {@link #fromData}.
 */
@Override
public void toData(DataOutput out,
    SerializationContext context) throws IOException {
  out.writeByte(_operation.getEventCode());
  DataSerializer.writeString(_regionName, out);
  DataSerializer.writeObject(_keyOfInterest, out);
  // The value is always written as a byte[]; a non-byte[] value is serialized
  // first (the _valueIsObject byte written below records which case applies).
  if (_value instanceof byte[]) {
    DataSerializer.writeByteArray((byte[]) _value, out);
  } else {
    DataSerializer.writeByteArray(CacheServerHelper.serialize(_value), out);
  }
  out.writeByte(_valueIsObject);
  DataSerializer.writeObject(_membershipId, out);
  out.writeBoolean(_shouldConflate);
  out.writeBoolean(_isInterestListPassed);
  DataSerializer.writeByteArray(deltaBytes, out);
  out.writeBoolean(_hasCqs);
  DataSerializer.writeObject(_callbackArgument, out);
  // Snapshot the (possibly concurrent) interest sets before writing so the
  // written length always matches the written contents.
  HashSet<ClientProxyMembershipID> clientInterestListSnapshot =
      _clientInterestList != null
          ? new HashSet<>(_clientInterestList)
          : null;
  DataSerializer.writeHashSet(clientInterestListSnapshot, out);
  HashSet<ClientProxyMembershipID> clientInterestListInvSnapshot =
      _clientInterestListInv != null
          ? new HashSet<>(_clientInterestListInv)
          : null;
  DataSerializer.writeHashSet(clientInterestListInvSnapshot, out);
  DataSerializer.writeObject(versionTag, out);
}
/**
 * Deserializes a message written by {@link #toData}; the read order must
 * match the write order exactly.
 */
@Override
public void fromData(DataInput in,
    DeserializationContext context) throws IOException, ClassNotFoundException {
  _operation = EnumListenerEvent.getEnumListenerEvent(in.readByte());
  _regionName = DataSerializer.readString(in);
  _keyOfInterest = DataSerializer.readObject(in);
  _value = DataSerializer.readByteArray(in);
  _valueIsObject = in.readByte();
  // Canonicalized so equal membership ids resolve to a shared instance.
  _membershipId = ClientProxyMembershipID.readCanonicalized(in);
  _shouldConflate = in.readBoolean();
  _isInterestListPassed = in.readBoolean();
  deltaBytes = DataSerializer.readByteArray(in);
  _hasCqs = in.readBoolean();
  _callbackArgument = DataSerializer.readObject(in);
  // Interest lists are resolved through the local CacheClientNotifier;
  // without one the lists are dropped (null).
  CacheClientNotifier ccn = CacheClientNotifier.getInstance();
  Set<ClientProxyMembershipID> clientInterestList = DataSerializer.readHashSet(in);
  _clientInterestList = ccn != null && clientInterestList != null
      ? ccn.getProxyIDs(clientInterestList)
      : null;
  Set<ClientProxyMembershipID> clientInterestListInv = DataSerializer.readHashSet(in);
  _clientInterestListInv = ccn != null && clientInterestListInv != null
      ? ccn.getProxyIDs(clientInterestListInv)
      : null;
  versionTag = DataSerializer.readObject(in);
}
/**
 * Unwraps nested WrappedCallbackArgument layers and returns the innermost
 * (original) callback argument.
 */
private Object getOriginalCallbackArgument() {
  Object arg = _callbackArgument;
  while (arg instanceof WrappedCallbackArgument) {
    arg = ((WrappedCallbackArgument) arg).getOriginalCallbackArg();
  }
  return arg;
}
/*
* Statically calculate constant overhead for ClientUpdateMessageImpl instance.
*/
static {
// The sizes of the following variables are calculated:
// - primitive and object instance variable references
//
// The sizes of the following variables are not calculated:
// - the key because it is a reference
// - the region and regionName because they are references
// - the operation because it is a reference
// - the membershipId because it is a reference
// - the logger because it is a reference
// - the keyOfInterest because it is a reference
// - the clientCqs because it is a reference
// - the clientInterestList because it is a reference
// - the eventIdentifier because it is a reference
// The size of instances of the following internal datatypes were estimated
// using a NullDataOutputStream and hardcoded into this method:
// - the id (an instance of EventId)
int size = 0;
// Add overhead for this instance.
size += Sizeable.PER_OBJECT_OVERHEAD;
// Add object references
// _operation reference = 4 bytes
// _regionName reference = 4 bytes
// _keyOfInterest reference = 4 bytes
// _value reference = 4 bytes
// _callbackArgument reference = 4 bytes
// _membershipId reference = 4 bytes
// _eventIdentifier reference = 4 bytes
// _logger reference = 4 bytes
// _clientCqs reference = 4 bytes
// _clientInterestList reference = 4 bytes
size += 40;
// Add primitive references
// byte _valueIsObject = 1 byte
// boolean _shouldConflate = 1 byte
// boolean _isInterestListPassed = 1 byte
// boolean _hasCqs = 1 byte
// boolean _isNetLoad = 1 byte
size += 5;
// not sure on the kind on wrapper is around callbackArgument
// The callback argument (a GatewayEventCallbackArgument wrapping an Object
// which is the original callback argument)
// The hardcoded value below represents the GatewayEventCallbackArgument
// and was estimated using a NullDataOutputStream
size += Sizeable.PER_OBJECT_OVERHEAD + 194; // do we need it
// add overhead for callback Argument
size += Sizeable.PER_OBJECT_OVERHEAD;
// total overhead
CONSTANT_MEMORY_OVERHEAD = size;
}
/**
 * Estimated heap footprint of this message: the constant per-instance
 * overhead, plus the value byte[], plus the unwrapped callback argument.
 */
@Override
public int getSizeInBytes() {
  int total = CONSTANT_MEMORY_OVERHEAD;
  if (_value != null) {
    total += CachedDeserializableFactory.calcMemSize(_value);
  }
  total += sizeOf(getOriginalCallbackArgument());
  return total;
}
/**
 * Estimates the in-memory size of the given (unwrapped) callback argument.
 * Boxed Integer/Long are counted as their primitive payload only; other
 * objects subtract the per-object overhead already included in
 * CONSTANT_MEMORY_OVERHEAD.
 */
private int sizeOf(Object obj) {
  if (obj == null) {
    return 0;
  }
  if (obj instanceof String) {
    return ObjectSizer.DEFAULT.sizeof(obj);
  }
  if (obj instanceof Integer) {
    return 4; // estimate
  }
  if (obj instanceof Long) {
    return 8; // estimate
  }
  return CachedDeserializableFactory.calcMemSize(obj) - Sizeable.PER_OBJECT_OVERHEAD;
}
/**
 * Returns false: client update messages do not bypass the authorization
 * check.
 *
 * @see org.apache.geode.internal.cache.tier.sockets.ClientUpdateMessage#needsNoAuthorizationCheck()
 */
@Override
public boolean needsNoAuthorizationCheck() {
  return false;
}
/**
 * Returns the CQ name→operation tuples registered for the given client, or
 * null when no client CQ map exists (or the client has no entry).
 */
@Override
public CqNameToOp getClientCq(ClientProxyMembershipID clientId) {
  return _clientCqs == null ? null : _clientCqs.get(clientId);
}
/** Returns null: no version-specific serialization variants are declared. */
@Override
public KnownVersion[] getSerializationVersions() {
  return null;
}
/**
 * Even though this class is just a ConcurrentHashMap I wanted it to be its own class so it could
 * be easily identified in heap dumps. The concurrency level on these should be 1 to keep their
 * memory footprint down.
 */
public static class ClientCqConcurrentMap
    extends ConcurrentHashMap<ClientProxyMembershipID, CqNameToOp> {
  /** Full-control constructor; callers should pass concurrencyLevel 1 (see class doc). */
  public ClientCqConcurrentMap(int initialCapacity, float loadFactor, int concurrencyLevel) {
    super(initialCapacity, loadFactor, concurrencyLevel);
  }
  /** Default configuration: capacity 16, load factor 1.0, concurrency level 1. */
  ClientCqConcurrentMap() {
    super(16, 1.0f, 1);
  }
}
/**
 * Replaces what used to be a HashMap<String, Integer>: maps CQ names to the
 * operation to deliver for each CQ, and knows how to serialize itself.
 */
public interface CqNameToOp extends Sendable {
  /** Returns true when no CQ name/operation tuples are held. */
  boolean isEmpty();
  /**
   * Returns true if calling add would fail.
   */
  boolean isFull();
  /** Appends each held CQ name and operation id to the given message. */
  void addToMessage(@NotNull Message message);
  /** Returns the number of CQ name/operation tuples held. */
  int size();
  /** Returns the held CQ names; may be a live internal array — do not mutate. */
  @NotNull
  String[] getNames();
  /** Adds (or replaces) the operation associated with the given CQ name. */
  void add(@NotNull String name, @NotNull MessageType op);
  /** Removes the tuple for the given CQ name, if present. */
  void delete(@NotNull String name);
}
/**
 * A {@link CqNameToOp} holding at most one CQ-name → operation tuple. This is
 * the common case and has a far smaller memory footprint than a one-entry
 * HashMap.
 */
public static class CqNameToOpSingleEntry implements CqNameToOp {
  private static final String[] EMPTY_NAMES_ARRAY = new String[0];

  // Either EMPTY_NAMES_ARRAY (empty) or a single-element array (full); kept
  // as an array so getNames() can return it without copying.
  private String[] name;
  private MessageType op;

  /** Creates an empty tuple. */
  CqNameToOpSingleEntry() {
    setEmpty();
  }

  /** Creates a full tuple for the given CQ name and operation. */
  CqNameToOpSingleEntry(final @NotNull String name, final @NotNull MessageType op) {
    setOne(name, op);
  }

  @Override
  public void sendTo(DataOutput out) throws IOException {
    // The wire format must be indistinguishable from writeObject on a HASH_MAP.
    out.writeByte(DSCODE.HASH_MAP.toByte());
    final int size = size();
    InternalDataSerializer.writeArrayLength(size, out);
    if (size > 0) {
      DataSerializer.writeObject(name[0], out);
      DataSerializer.writeObject(op.id, out);
    }
  }

  @Override
  public boolean isEmpty() {
    return name.length == 0;
  }

  @Override
  public void addToMessage(@NotNull Message message) {
    if (!isEmpty()) {
      message.addStringPart(name[0], true);
      message.addIntPart(op.id);
    }
  }

  @Override
  public int size() {
    return name.length;
  }

  @Override
  public @NotNull String @NotNull [] getNames() {
    return name;
  }

  @Override
  public void add(final @NotNull String name, final @NotNull MessageType op) {
    if (isEmpty()) {
      setOne(name, op);
      return;
    }
    if (!this.name[0].equals(name)) {
      throw new IllegalStateException("tried to add to a full CqNameToOpSingleEntry");
    }
    // Same CQ name: just replace the operation.
    this.op = op;
  }

  @Override
  public void delete(@NotNull String name) {
    if (!isEmpty() && name.equals(this.name[0])) {
      setEmpty();
    }
  }

  @Override
  public boolean isFull() {
    return !isEmpty();
  }

  private void setEmpty() {
    name = EMPTY_NAMES_ARRAY;
    op = null;
  }

  private void setOne(final @NotNull String name, final @NotNull MessageType op) {
    this.name = new String[] {name};
    this.op = op;
  }
}
/**
 * A ConcurrentHashMap<String, MessageType> exposed only through the
 * {@link CqNameToOp} interface; used once a single-entry tuple overflows.
 */
public static class CqNameToOpHashMap extends ConcurrentHashMap<String, MessageType>
    implements CqNameToOp {
  public CqNameToOpHashMap(int initialCapacity) {
    super(initialCapacity, 1.0f);
  }

  /** Promotes a full single-entry tuple into a map. */
  CqNameToOpHashMap(CqNameToOpSingleEntry se) {
    super(2, 1.0f);
    add(se.name[0], se.op);
  }

  @Override
  public void sendTo(DataOutput out) throws IOException {
    // The wire format must be indistinguishable from writeObject on a HASH_MAP.
    out.writeByte(DSCODE.HASH_MAP.toByte());
    // Snapshot first so the written length always matches the entry count.
    Collection<Entry<String, MessageType>> entrySnapshot = new ArrayList<>(entrySet());
    InternalDataSerializer.writeArrayLength(entrySnapshot.size(), out);
    for (Entry<String, MessageType> entry : entrySnapshot) {
      DataSerializer.writeObject(entry.getKey(), out);
      DataSerializer.writeObject(entry.getValue().id, out);
    }
  }

  @Override
  public @NotNull String @NotNull [] getNames() {
    return keySet().toArray(new String[0]);
  }

  @Override
  public void addToMessage(@NotNull Message message) {
    for (Entry<String, MessageType> entry : entrySet()) {
      message.addStringPart(entry.getKey(), true); // CQ name
      message.addIntPart(entry.getValue().id); // CQ operation
    }
  }

  @Override
  public void add(@NotNull String name, @NotNull MessageType op) {
    put(name, op);
  }

  @Override
  public void delete(@NotNull String name) {
    remove(name);
  }

  @Override
  public boolean isFull() {
    return false; // a hash map never fills up
  }
}
// NewValueImporter methods

/** This importer prefers to receive the new value already serialized. */
@Override
public boolean prefersNewSerialized() {
  return true;
}
/** Returns false: unretained new-value references must not be passed here. */
@Override
public boolean isUnretainedNewReferenceOk() {
  return false;
}
/**
 * Imports an already-deserialized new value by serializing it into _value.
 * Raw byte[] values must arrive via {@link #importNewBytes} instead.
 *
 * @throws IllegalStateException if called with isSerialized == false
 * @throws GemFireIOException if serializing the value fails
 */
@Override
public void importNewObject(Object nv, boolean isSerialized) {
  if (!isSerialized) {
    throw new IllegalStateException("Expected importNewBytes to be called.");
  }
  try {
    _value = CacheServerHelper.serialize(nv);
  } catch (IOException e) {
    throw new GemFireIOException("Exception serializing entry value", e);
  }
}
/**
 * Imports a new value already in byte[] form. When the bytes are not a
 * serialized object, the _valueIsObject flag is cleared so readers treat
 * the payload as a raw byte[].
 */
@Override
public void importNewBytes(byte[] nv, boolean isSerialized) {
  _value = nv;
  if (!isSerialized) {
    _valueIsObject = 0x00; // raw byte[] payload, not a serialized object
  }
}
}
|
googleapis/google-cloud-java | 36,749 | java-telcoautomation/proto-google-cloud-telcoautomation-v1/src/main/java/com/google/cloud/telcoautomation/v1/NFDeployStatus.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/telcoautomation/v1/telcoautomation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.telcoautomation.v1;
/**
*
*
* <pre>
* Deployment status of NFDeploy.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1.NFDeployStatus}
*/
public final class NFDeployStatus extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1.NFDeployStatus)
NFDeployStatusOrBuilder {
private static final long serialVersionUID = 0L;
// Use NFDeployStatus.newBuilder() to construct.
private NFDeployStatus(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private NFDeployStatus() {
sites_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new NFDeployStatus();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_NFDeployStatus_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_NFDeployStatus_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.telcoautomation.v1.NFDeployStatus.class,
com.google.cloud.telcoautomation.v1.NFDeployStatus.Builder.class);
}
public static final int TARGETED_NFS_FIELD_NUMBER = 1;
private int targetedNfs_ = 0;
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment
* </pre>
*
* <code>int32 targeted_nfs = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The targetedNfs.
*/
@java.lang.Override
public int getTargetedNfs() {
return targetedNfs_;
}
public static final int READY_NFS_FIELD_NUMBER = 2;
private int readyNfs_ = 0;
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment with a Ready
* Condition set.
* </pre>
*
* <code>int32 ready_nfs = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The readyNfs.
*/
@java.lang.Override
public int getReadyNfs() {
return readyNfs_;
}
public static final int SITES_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.telcoautomation.v1.NFDeploySiteStatus> sites_;
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.telcoautomation.v1.NFDeploySiteStatus> getSitesList() {
return sites_;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.telcoautomation.v1.NFDeploySiteStatusOrBuilder>
getSitesOrBuilderList() {
return sites_;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public int getSitesCount() {
return sites_.size();
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.cloud.telcoautomation.v1.NFDeploySiteStatus getSites(int index) {
return sites_.get(index);
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.cloud.telcoautomation.v1.NFDeploySiteStatusOrBuilder getSitesOrBuilder(
int index) {
return sites_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (targetedNfs_ != 0) {
output.writeInt32(1, targetedNfs_);
}
if (readyNfs_ != 0) {
output.writeInt32(2, readyNfs_);
}
for (int i = 0; i < sites_.size(); i++) {
output.writeMessage(3, sites_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (targetedNfs_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, targetedNfs_);
}
if (readyNfs_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, readyNfs_);
}
for (int i = 0; i < sites_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, sites_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.telcoautomation.v1.NFDeployStatus)) {
return super.equals(obj);
}
com.google.cloud.telcoautomation.v1.NFDeployStatus other =
(com.google.cloud.telcoautomation.v1.NFDeployStatus) obj;
if (getTargetedNfs() != other.getTargetedNfs()) return false;
if (getReadyNfs() != other.getReadyNfs()) return false;
if (!getSitesList().equals(other.getSitesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TARGETED_NFS_FIELD_NUMBER;
hash = (53 * hash) + getTargetedNfs();
hash = (37 * hash) + READY_NFS_FIELD_NUMBER;
hash = (53 * hash) + getReadyNfs();
if (getSitesCount() > 0) {
hash = (37 * hash) + SITES_FIELD_NUMBER;
hash = (53 * hash) + getSitesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.telcoautomation.v1.NFDeployStatus parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.telcoautomation.v1.NFDeployStatus prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Deployment status of NFDeploy.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1.NFDeployStatus}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1.NFDeployStatus)
com.google.cloud.telcoautomation.v1.NFDeployStatusOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_NFDeployStatus_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_NFDeployStatus_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.telcoautomation.v1.NFDeployStatus.class,
com.google.cloud.telcoautomation.v1.NFDeployStatus.Builder.class);
}
// Construct using com.google.cloud.telcoautomation.v1.NFDeployStatus.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
targetedNfs_ = 0;
readyNfs_ = 0;
if (sitesBuilder_ == null) {
sites_ = java.util.Collections.emptyList();
} else {
sites_ = null;
sitesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.telcoautomation.v1.TelcoautomationProto
.internal_static_google_cloud_telcoautomation_v1_NFDeployStatus_descriptor;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.NFDeployStatus getDefaultInstanceForType() {
return com.google.cloud.telcoautomation.v1.NFDeployStatus.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.NFDeployStatus build() {
com.google.cloud.telcoautomation.v1.NFDeployStatus result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.NFDeployStatus buildPartial() {
com.google.cloud.telcoautomation.v1.NFDeployStatus result =
new com.google.cloud.telcoautomation.v1.NFDeployStatus(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.telcoautomation.v1.NFDeployStatus result) {
if (sitesBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)) {
sites_ = java.util.Collections.unmodifiableList(sites_);
bitField0_ = (bitField0_ & ~0x00000004);
}
result.sites_ = sites_;
} else {
result.sites_ = sitesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.telcoautomation.v1.NFDeployStatus result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.targetedNfs_ = targetedNfs_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.readyNfs_ = readyNfs_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.telcoautomation.v1.NFDeployStatus) {
return mergeFrom((com.google.cloud.telcoautomation.v1.NFDeployStatus) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.telcoautomation.v1.NFDeployStatus other) {
if (other == com.google.cloud.telcoautomation.v1.NFDeployStatus.getDefaultInstance())
return this;
if (other.getTargetedNfs() != 0) {
setTargetedNfs(other.getTargetedNfs());
}
if (other.getReadyNfs() != 0) {
setReadyNfs(other.getReadyNfs());
}
if (sitesBuilder_ == null) {
if (!other.sites_.isEmpty()) {
if (sites_.isEmpty()) {
sites_ = other.sites_;
bitField0_ = (bitField0_ & ~0x00000004);
} else {
ensureSitesIsMutable();
sites_.addAll(other.sites_);
}
onChanged();
}
} else {
if (!other.sites_.isEmpty()) {
if (sitesBuilder_.isEmpty()) {
sitesBuilder_.dispose();
sitesBuilder_ = null;
sites_ = other.sites_;
bitField0_ = (bitField0_ & ~0x00000004);
sitesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSitesFieldBuilder()
: null;
} else {
sitesBuilder_.addAllMessages(other.sites_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
targetedNfs_ = input.readInt32();
bitField0_ |= 0x00000001;
break;
} // case 8
case 16:
{
readyNfs_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus m =
input.readMessage(
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.parser(),
extensionRegistry);
if (sitesBuilder_ == null) {
ensureSitesIsMutable();
sites_.add(m);
} else {
sitesBuilder_.addMessage(m);
}
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int targetedNfs_;
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment
* </pre>
*
* <code>int32 targeted_nfs = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The targetedNfs.
*/
@java.lang.Override
public int getTargetedNfs() {
return targetedNfs_;
}
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment
* </pre>
*
* <code>int32 targeted_nfs = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The targetedNfs to set.
* @return This builder for chaining.
*/
public Builder setTargetedNfs(int value) {
targetedNfs_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment
* </pre>
*
* <code>int32 targeted_nfs = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearTargetedNfs() {
bitField0_ = (bitField0_ & ~0x00000001);
targetedNfs_ = 0;
onChanged();
return this;
}
private int readyNfs_;
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment with a Ready
* Condition set.
* </pre>
*
* <code>int32 ready_nfs = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The readyNfs.
*/
@java.lang.Override
public int getReadyNfs() {
return readyNfs_;
}
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment with a Ready
* Condition set.
* </pre>
*
* <code>int32 ready_nfs = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The readyNfs to set.
* @return This builder for chaining.
*/
public Builder setReadyNfs(int value) {
readyNfs_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Total number of NFs targeted by this deployment with a Ready
* Condition set.
* </pre>
*
* <code>int32 ready_nfs = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearReadyNfs() {
bitField0_ = (bitField0_ & ~0x00000002);
readyNfs_ = 0;
onChanged();
return this;
}
private java.util.List<com.google.cloud.telcoautomation.v1.NFDeploySiteStatus> sites_ =
java.util.Collections.emptyList();
private void ensureSitesIsMutable() {
if (!((bitField0_ & 0x00000004) != 0)) {
sites_ =
new java.util.ArrayList<com.google.cloud.telcoautomation.v1.NFDeploySiteStatus>(sites_);
bitField0_ |= 0x00000004;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus,
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder,
com.google.cloud.telcoautomation.v1.NFDeploySiteStatusOrBuilder>
sitesBuilder_;
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<com.google.cloud.telcoautomation.v1.NFDeploySiteStatus> getSitesList() {
if (sitesBuilder_ == null) {
return java.util.Collections.unmodifiableList(sites_);
} else {
return sitesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public int getSitesCount() {
if (sitesBuilder_ == null) {
return sites_.size();
} else {
return sitesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.telcoautomation.v1.NFDeploySiteStatus getSites(int index) {
if (sitesBuilder_ == null) {
return sites_.get(index);
} else {
return sitesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setSites(
int index, com.google.cloud.telcoautomation.v1.NFDeploySiteStatus value) {
if (sitesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSitesIsMutable();
sites_.set(index, value);
onChanged();
} else {
sitesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setSites(
int index, com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder builderForValue) {
if (sitesBuilder_ == null) {
ensureSitesIsMutable();
sites_.set(index, builderForValue.build());
onChanged();
} else {
sitesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addSites(com.google.cloud.telcoautomation.v1.NFDeploySiteStatus value) {
if (sitesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSitesIsMutable();
sites_.add(value);
onChanged();
} else {
sitesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addSites(
int index, com.google.cloud.telcoautomation.v1.NFDeploySiteStatus value) {
if (sitesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSitesIsMutable();
sites_.add(index, value);
onChanged();
} else {
sitesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addSites(
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder builderForValue) {
if (sitesBuilder_ == null) {
ensureSitesIsMutable();
sites_.add(builderForValue.build());
onChanged();
} else {
sitesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addSites(
int index, com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder builderForValue) {
if (sitesBuilder_ == null) {
ensureSitesIsMutable();
sites_.add(index, builderForValue.build());
onChanged();
} else {
sitesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addAllSites(
java.lang.Iterable<? extends com.google.cloud.telcoautomation.v1.NFDeploySiteStatus>
values) {
if (sitesBuilder_ == null) {
ensureSitesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sites_);
onChanged();
} else {
sitesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder clearSites() {
if (sitesBuilder_ == null) {
sites_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
} else {
sitesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder removeSites(int index) {
if (sitesBuilder_ == null) {
ensureSitesIsMutable();
sites_.remove(index);
onChanged();
} else {
sitesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder getSitesBuilder(
int index) {
return getSitesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.telcoautomation.v1.NFDeploySiteStatusOrBuilder getSitesOrBuilder(
int index) {
if (sitesBuilder_ == null) {
return sites_.get(index);
} else {
return sitesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<? extends com.google.cloud.telcoautomation.v1.NFDeploySiteStatusOrBuilder>
getSitesOrBuilderList() {
if (sitesBuilder_ != null) {
return sitesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(sites_);
}
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder addSitesBuilder() {
return getSitesFieldBuilder()
.addBuilder(com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder addSitesBuilder(
int index) {
return getSitesFieldBuilder()
.addBuilder(
index, com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. Per-Site Status.
* </pre>
*
* <code>
* repeated .google.cloud.telcoautomation.v1.NFDeploySiteStatus sites = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder>
getSitesBuilderList() {
return getSitesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus,
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder,
com.google.cloud.telcoautomation.v1.NFDeploySiteStatusOrBuilder>
getSitesFieldBuilder() {
if (sitesBuilder_ == null) {
sitesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus,
com.google.cloud.telcoautomation.v1.NFDeploySiteStatus.Builder,
com.google.cloud.telcoautomation.v1.NFDeploySiteStatusOrBuilder>(
sites_, ((bitField0_ & 0x00000004) != 0), getParentForChildren(), isClean());
sites_ = null;
}
return sitesBuilder_;
}
    // Unknown-field handling is inherited unchanged from the generated base
    // builder; these overrides only exist to narrow the return type.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1.NFDeployStatus)
}
// @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1.NFDeployStatus)
  // Singleton default instance shared by all callers; created eagerly when the
  // class is initialized.
  private static final com.google.cloud.telcoautomation.v1.NFDeployStatus DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1.NFDeployStatus();
  }

  // Returns the immutable default (all-fields-unset) NFDeployStatus message.
  public static com.google.cloud.telcoautomation.v1.NFDeployStatus getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. Every parse failure is normalized to an
  // InvalidProtocolBufferException carrying the partially-built message so
  // callers can inspect whatever was decoded before the error.
  private static final com.google.protobuf.Parser<NFDeployStatus> PARSER =
      new com.google.protobuf.AbstractParser<NFDeployStatus>() {
        @java.lang.Override
        public NFDeployStatus parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // already the right exception type; just attach the partial message
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<NFDeployStatus> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<NFDeployStatus> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.telcoautomation.v1.NFDeployStatus getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/coherence | 36,811 | prj/test/unit/coherence-tests/src/test/java/com/tangosol/util/GateTest.java | /*
* Copyright (c) 2000, 2022, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* http://oss.oracle.com/licenses/upl.
*/
package com.tangosol.util;
import com.oracle.coherence.common.base.Blocking;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.*;
import org.junit.Test;
/**
* Unit test of thread {@link Gate} implementations.
*
* @author op 2013.04.20
*/
public class GateTest
extends Base
{
// ----- unit tests -----------------------------------------------------
    // Builds the set of multi-threaded stress TestCases run by testGate();
    // the bar/close stress test is currently disabled (commented out below).
    private static TestCase[] setupTests()
        {
        return new TestCase[]
            {
            setupEnterExitTest(),
            //setupBarCloseTest()
            };
        }
    /**
     * Test legacy ThreadGate implementation.
     */
    @Test
    public void testThreadGate()
            throws Throwable
        {
        testGate(1);  // 1 == ThreadGate (see makeGate)
        }

    /**
     * Test ThreadGateLite implementation.
     */
    @Test
    public void testThreadGateLite()
            throws Throwable
        {
        testGate(2);  // 2 == ThreadGateLite (see makeGate)
        }

    /**
     * Test WrapperReentrantGate implementation.
     */
    @Test
    public void testWrapperReentrantGate()
            throws Throwable
        {
        testGate(3);  // 3 == WrapperReentrantGate (see makeGate)
        }
protected void testGate(int iGate)
throws Throwable
{
// lock downgrade is allowed
singleThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.CLOSE, GATE_OPS.ENTER},
new boolean[]{true, true});
// lock upgrade: legacy ThreadGate allows it, but TGL and WrapperReentrantGate - do not
singleThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.ENTER, GATE_OPS.CLOSE},
new boolean[]{true, iGate == 1 ? true : false });
singleThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.CLOSE, GATE_OPS.BAR, GATE_OPS.OPEN, GATE_OPS.CLOSED_CUR_THR},
new boolean[]{true, true, true, true});
singleThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.BAR, GATE_OPS.CLOSE, GATE_OPS.OPEN, GATE_OPS.CLOSED_CUR_THR,
GATE_OPS.BAR, GATE_OPS.CLOSE, GATE_OPS.BAR, GATE_OPS.CLOSE,
GATE_OPS.OPEN, GATE_OPS.OPEN, GATE_OPS.OPEN, GATE_OPS.OPEN, GATE_OPS.OPEN,
GATE_OPS.CLOSED_CUR_THR},
new boolean[]{true, true, true, true, true, true, true, true,
true, true, true, true, true, false});
singleThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.ENTER, GATE_OPS.BAR, GATE_OPS.ENTER, GATE_OPS.EXIT, GATE_OPS.OPEN,
GATE_OPS.CLOSED_CUR_THR},
new boolean[]{true, true, true, true, true, false});
singleThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.ENTER, GATE_OPS.CLOSE},
// TGL does not allow lock promotion, WRG by default is based on TGL
new boolean[]{true, iGate == 1 ? true : false});
singleThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.ENTER, GATE_OPS.ENTER, GATE_OPS.ENTER, GATE_OPS.EXIT, GATE_OPS.EXIT,
GATE_OPS.CLOSE, GATE_OPS.EXIT, GATE_OPS.CLOSE},
new boolean[]{true, true, true, true, true, iGate == 1 ? true : false, true, true});
//todo 1. can proceed after barEntry by another thread if already in; 2. dead thread detected
fourThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.CLOSE, GATE_OPS.BAR, GATE_OPS.ENTER, GATE_OPS.OPEN},
new Boolean[]{true, false, false, null});
fourThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.ENTER, GATE_OPS.CLOSE, GATE_OPS.BAR, GATE_OPS.OPEN},
new Boolean[]{true, false, true, null});
fourThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.BAR, GATE_OPS.CLOSE, GATE_OPS.ENTER, GATE_OPS.EXIT},
new Boolean[]{true, false, false, null});
fourThreadTest(makeGate(iGate), new GATE_OPS[]
{GATE_OPS.ENTER, GATE_OPS.BAR, GATE_OPS.ENTER, GATE_OPS.CLOSED},
new Boolean[]{true, true, false, false});
TestCase[] aTestCases = setupTests();
runTesCase(aTestCases, iGate);
// clean up the threads
for (int i = 0; i < aTestCases.length; i++)
{
aTestCases[i].cleanup();
}
}
// ---- Helper methods ---------------------------------
protected static Gate makeGate(int iGate)
{
switch (iGate)
{
case 1:
return new ThreadGate();
case 2:
return new ThreadGateLite();
case 3:
return new WrapperReentrantGate();
}
return null;
}
/**
* Test ThreadGateLite implementation with the specified tests.
*/
    // NOTE(review): the method name has a typo ("runTesCase" instead of
    // "runTestCase"); it is kept as-is because the method is public and
    // renaming it would break existing callers.
    public static void runTesCase(TestCase[] aTest, int iGate)
            throws Throwable
        {
        for (int i = 0; i < aTest.length; i++)
            {
            // each TestCase is run against a freshly constructed gate
            aTest[i].execute(makeGate(iGate));
            }
        }
private static TestCase setupEnterExitTest()
{
Runner[] aThread = new Runner[NUM_THREADS];
for (int i = 0; i < NUM_THREADS; ++i)
{
aThread[i] = new EnterExitRunner(NUM_ITERATIONS);
aThread[i].setName("EnterExitRunner(" + i + ")");
}
return new TestCase("testEnterExit", aThread);
}
private static TestCase setupBarCloseTest()
{
Runner[] aThread = new Runner[NUM_THREADS + 2];
// add special threads
for (int i = 0; i < 2; ++i)
{
aThread[i] = new BarCloseRunner(NUM_ITERATIONS);
aThread[i].setName(Integer.toString(i));
}
// add enter/exit threads to the test
for (int i = 2; i < NUM_THREADS + 2; ++i)
{
aThread[i] = new ReEnterRunner(NUM_ITERATIONS);
aThread[i].setName("ReEnterRunner(" + i + ")");
}
return new TestCase("testBarClose", aThread);
}
private void singleThreadTest(Gate gate, GATE_OPS[] aOps, boolean[] afResults)
throws Throwable
{
System.out.println("* * * Single-thread test for " + gate.getClass() + " * * * ");
int nOpsLen = aOps.length;
for (int i = 0; i < nOpsLen; i++)
{
doOperation(gate, aOps[i], afResults[i], false);
}
}
private void fourThreadTest(Gate gate, GATE_OPS[] aOps, Boolean[] afResults)
throws Throwable
{
assertEquals(4, aOps.length);
System.out.println("* * * Four-thread test for " + gate.getClass()
+ " * * * \n* * * afResults=" + afResults[0]+", "+afResults[1]+", "
+afResults[2]+", " + ""+afResults[3]);
final Object oMon = new Object();
OrderedRunner first = new OrderedRunner(aOps[0], 1, afResults[0] == null ? false : afResults[0],
oMon, afResults[0] == null ? true : false);
first.setName("First Runner-" + aOps[0]);
first.setMaxRun(3000);
first.setStart(System.currentTimeMillis());
OrderedRunner second = new OrderedRunner(aOps[1], 2, afResults[1] == null ? false : afResults[1],
oMon, afResults[1] == null ? true : false);
second.setName("Second Runner-" + aOps[1]);
second.setMaxRun(4000);
second.setStart(System.currentTimeMillis());
OrderedRunner third = new OrderedRunner(aOps[2], 3, afResults[2] == null ? false : afResults[2],
oMon, afResults[2] == null ? true : false);
third.setName("Third Runner-" + aOps[2]);
third.setMaxRun(5000);
third.setStart(System.currentTimeMillis());
OrderedRunner fourth = new OrderedRunner(aOps[3], 4, afResults[3] == null ? false : afResults[3],
oMon, afResults[3] == null ? true : false);
fourth.setName("Fourth Runner-" + aOps[3]);
fourth.setMaxRun(6000);
fourth.setStart(System.currentTimeMillis());
Runner[] aThread = new Runner[]{first, second, third, fourth};
TestCase test = new TestCase("Four-thread Test: " + aOps[0] + "-" + aOps[1]
+ "-" + aOps[2] + "-" + aOps[3], aThread);
first.setTestCase(test);
second.setTestCase(test);
third.setTestCase(test);
fourth.setTestCase(test);
test.execute(gate);
}
    /**
     * A TestCase owns a set of Runner threads, starts them, releases them all
     * at once and then blocks until every runner reports completion (or one
     * of them fails). The synchronization protocol is: runners increment
     * m_atomicThreads and wait on this TestCase; execute() notifies them all
     * to start and then waits; the last runner to finish (or the first to
     * fail) notifies execute() back.
     */
    public static class TestCase
        {
        /**
         * Construct a new TestCase with the specified name to be run by the
         * specified array of Runner threads.
         *
         * @param sTestName the name of the TestCase
         * @param aThread the array of Runner threads
         */
        public TestCase(String sTestName, Runner[] aThread)
            {
            m_aThread = aThread;
            m_sTestName = sTestName;

            for (int i = 0; i < aThread.length; i++)
                {
                aThread[i].setTestCase(this);
                aThread[i].start();
                }
            }

        /**
         * Run the TestCase on the specified thread gate
         *
         * @param gate the Gate to test
         */
        public void execute(Gate gate)
                throws Throwable
            {
            Runner[] aThread = m_aThread;

            // init all threads with the gate
            for (int i = 0; i < aThread.length; i++)
                {
                aThread[i].setGate(gate);
                }

            System.out.println("==== Running " + m_sTestName + " for " + gate.getClass().getName()
                + " in " + m_aThread.length + " threads with "
                + aThread[m_aThread.length - 1].getIterations() + " " + "iterations ====");

            // wait until every runner has checked in and is waiting to start
            while(m_atomicThreads.get() < m_aThread.length)
                {
                sleep(100);
                }

            synchronized (this)
                {
                // wake up test threads and run the test
                notifyAll();
                try
                    {
                    // wait for all the test threads to finish
                    Blocking.wait(this);
                    }
                catch (InterruptedException ie)
                    {
                    System.out.println(m_sTestName + ".execute: InterruptedException: "
                        + "currentThread().interrupt(); rethrow ex");
                    Thread.currentThread().interrupt();
                    throw ensureRuntimeException(ie);
                    }
                }

            // propagate exception from runner thread to main
            if (m_ex != null)
                {
                throw new RuntimeException(m_ex);
                }
            }

        /**
         * Used by the Runner thread to notify the TestCase that it has
         * finished a TestCase execution.
         *
         * @param t a possible exception thrown by the Runner
         */
        private void threadFinished(Throwable t)
                throws InterruptedException
            {
            synchronized (this)
                {
                if (t != null)
                    {
                    // if one of the threads produced an exception, terminate test case
                    m_ex = t;
                    notifyAll();
                    }
                else if (m_atomicThreads.decrementAndGet() == 0)
                    {
                    // last thread - signal end of test case
                    notifyAll();
                    }
                else
                    {
                    // wait for TestCase to end all threads, see #execute
                    Blocking.wait(this);
                    }
                }
            }

        /**
         * Clean up threads used by TestCase.
         */
        public void cleanup()
            {
            for (int i = 0; i < m_aThread.length; i++)
                {
                Runner runner = m_aThread[i];
                if (runner.isAlive())
                    {
                    runner.interrupt();
                    }
                }
            }

        // ----- data members -----------------------------------------------

        /**
         * The counter of active test threads.
         */
        private AtomicInteger m_atomicThreads = new AtomicInteger(0);

        /**
         * The array of test threads.
         */
        private Runner[] m_aThread;

        /**
         * The name of this test.
         */
        private String m_sTestName;

        /**
         * The counter of threads currently in the gate (entered and not exited)
         */
        private AtomicInteger m_atomicEntered = new AtomicInteger(0);

        /**
         * Whether entry to the gate has been barred by a BarCloseRunner.
         */
        private volatile boolean m_fBarred;

        /**
         * Current status of the gate being tested
         */
        private volatile boolean m_fClosed;

        /**
         * Used to delay the start of the first BarCloseRunner
         * until ReEnterThreads have progressed enough
         */
        private volatile boolean[] m_afCloseWait = new boolean[] {true};

        /**
         * An exception or error thrown by one of the threads,
         * which needs to be saved and re-thrown by junit thread
         */
        protected Throwable m_ex;
        }
// ----- Runner inner class ---------------------------------------------
/**
* Basic TestCase runner thread.
*/
    public static abstract class Runner
            extends Thread
        {
        // ----- constructors ---------------------------------------------

        /**
         * Create a new Runner thread that will perform the specified
         * number of test iterations.
         *
         * @param cIteration the number of test iterations to perform
         */
        public Runner(int cIteration)
            {
            m_cIteration = cIteration;
            }

        /**
         * Get the number of test iterations this thread will perform
         */
        public int getIterations()
            {
            return m_cIteration;
            }

        /**
         * Set the thread gate to be tested.
         *
         * @param g the Gate to be tested
         */
        public void setGate(Gate g)
            {
            m_gate = g;
            }

        /**
         * Set the TestCase instance to run on this Thread.
         *
         * @param test the TestCase instance to run on this Thread
         */
        public void setTestCase(TestCase test)
            {
            m_test = test;
            }

        /**
         * Get time in milliseconds when the thread was started
         */
        public long getStart()
            {
            return m_ldtStart;
            }

        public void setStart(long start)
            {
            m_ldtStart = start;
            }

        /**
         * Get maximum time in milliseconds the thread is allowed to run
         */
        public long getMaxRun()
            {
            return m_ldtMaxRun;
            }

        /**
         * Set maximum time in milliseconds the thread is allowed to run
         */
        public void setMaxRun(long maxRun)
            {
            m_ldtMaxRun = maxRun;
            }

        /**
         * Artificially spin some CPU time to provide an approximation
         * of the actual thread gate workload.
         */
        public int spinTime(int i)
            {
            // naive recursive Fibonacci - intentionally wasteful
            if (i == 0 || i == 1)
                {
                return 1;
                }
            return spinTime(i - 1) + spinTime(i - 2);
            }

        /**
         * Perform a single test iteration.
         *
         * @param nIter the iteration number
         */
        protected abstract void doIteration(int nIter) throws Throwable;

        // ----- Thread methods -------------------------------------------

        /**
         * {@inheritDoc}
         */
        public void run()
            {
            synchronized (m_test)
                {
                try
                    {
                    // wait for all the other threads to start
                    m_test.m_atomicThreads.incrementAndGet();
                    Blocking.wait(m_test);
                    }
                catch (InterruptedException ie)
                    {
                    Thread.currentThread().interrupt();
                    return;
                    }
                }

            System.out.println("   started " + getName());
            try
                {
                try
                    {
                    for (int i = 0, c = m_cIteration; i < c; ++i)
                        {
                        doIteration(i);
                        }
                    }
                catch (Throwable t)
                    {
                    // report the failure to the TestCase so it can rethrow it
                    System.out.println("==> finished " + getName() + " with EXCEPTION!");
                    m_test.threadFinished(t);
                    return;
                    }

                System.out.println("   successfully finished " + getName());
                m_test.threadFinished(null);
                }
            catch (InterruptedException ie)
                {
                Thread.currentThread().interrupt();
                return;
                }
            }

        // ----- data members -----------------------------------------------

        /**
         * The TestCase being run by this thread.
         */
        protected TestCase m_test;

        /**
         * The target Gate.
         */
        protected Gate m_gate;

        /**
         * The number of iterations.
         */
        protected int m_cIteration;

        /**
         * Time in milliseconds when the thread was started
         */
        protected long m_ldtStart;

        /**
         * Maximum time in milliseconds the thread is allowed to run
         */
        protected long m_ldtMaxRun;
        }
/**
* Thread that performs successive enter() and exit() operations against
* a specified Gate.
*/
public static class EnterExitRunner
extends Runner
{
/**
* Construct a new EnterExitRunner thread.
*
* @param cIteration the number of times to run enter/exit step
*/
public EnterExitRunner(int cIteration)
{
super(cIteration);
}
/**
* {@inheritDoc}
*/
protected void doIteration(int nIter)
{
Gate gate = m_gate;
gate.enter(-1);
assertTrue(gate.isEnteredByCurrentThread());
spinTime(15*(nIter%2 + 1));
assertTrue(gate.isEnteredByCurrentThread());
gate.exit();
assertFalse(gate.isEnteredByCurrentThread());
}
}
/**
* Thread that performs a sequence of enter() operations
* followed by a sequence of exit() operations against a specified Gate.
*/
    public static class ReEnterRunner
            extends Runner
        {
        /**
         * Construct a new ReEnterRunner thread.
         *
         * @param cIteration the number of times to run enter/exit step
         */
        public ReEnterRunner(int cIteration)
            {
            super(cIteration);
            }

        /**
         * {@inheritDoc}
         * <p>
         * First half of the iterations attempt to (re-)enter the gate; the
         * second half unwinds the per-thread enter count one exit at a time.
         * The per-thread count is kept in tloCntEntered; the shared
         * m_atomicEntered counts distinct threads currently inside the gate.
         */
        protected void doIteration(int nIter)
            {
            Gate gate    = m_gate;
            int  cBefore = tloCntEntered.get();
            boolean fEnter;

            // when enough threads have entered, release BarCloseRunner threads from waiting
            if (m_test.m_afCloseWait[0] && m_test.m_atomicEntered.get() > NUM_THREADS / 2)
                {
                synchronized (m_test.m_afCloseWait)
                    {
                    if (m_test.m_afCloseWait[0])
                        {
                        m_test.m_afCloseWait[0] = false;
                        m_test.m_afCloseWait.notifyAll();
                        }
                    }
                }

            // split iterations equally between enter and exit attempts
            if (nIter < NUM_ITERATIONS / 2)
                {
                fEnter = gate.enter(0);
                if (fEnter)
                    {
                    // count only the first enter by the thread
                    if (cBefore == 0)
                        {
                        m_test.m_atomicEntered.getAndIncrement();
                        }
                    tloCntEntered.set(tloCntEntered.get() + 1);
                    spinTime(15*(nIter%2 + 1));
                    }
                }
            else
                {
                spinTime(15 * (nIter % 2 + 1));

                int cThisEntered = tloCntEntered.get();
                if (cThisEntered > 1)
                    {
                    // still entered more than once - exit leaves us inside
                    gate.exit();
                    assertTrue("#### cThisEntered="+cThisEntered, gate.isEnteredByCurrentThread());
                    tloCntEntered.set(cThisEntered - 1);
                    }
                else if (cThisEntered == 1)
                    {
                    // final exit for this thread - it is now outside the gate
                    gate.exit();
                    m_test.m_atomicEntered.getAndDecrement();
                    assertFalse(gate.isEnteredByCurrentThread());
                    tloCntEntered.set(0);
                    }
                else
                    {
                    assertFalse(gate.isEnteredByCurrentThread());
                    }
                }
            }

        // ---- data members ------------------------------------

        // per-thread count of unmatched enter() calls
        private ThreadLocal<Integer> tloCntEntered = new ThreadLocal<Integer>()
            {
            protected Integer initialValue()
                {
                return 0;
                }
            };
        }
/**
* Thread that performs successive close() and open() operations against
* a specified Gate.
*/
    public static class BarCloseRunner
            extends Runner
        {
        /**
         * Construct a new BarCloseRunner thread.
         *
         * @param cIteration the number of iterations
         */
        public BarCloseRunner(int cIteration)
            {
            super(cIteration);
            }

        /**
         * {@inheritDoc}
         * <p>
         * The thread's role is selected by its name: thread "0" bars entry,
         * then closes and finally reopens the gate; thread "1" keeps trying
         * to bar entry and only succeeds at the very end. Both wait on
         * m_afCloseWait until enough ReEnterRunner threads are inside.
         */
        protected void doIteration(int nIter)
            {
            if (tloFinished.get())
                {
                return;
                }

            if (m_test.m_afCloseWait[0])
                {
                synchronized (m_test.m_afCloseWait)
                    {
                    if (m_test.m_afCloseWait[0])
                        {
                        try
                            {
                            Blocking.wait(m_test.m_afCloseWait);
                            }
                        catch (InterruptedException e)
                            {
                            // ignored: fall through and run the iteration
                            }
                        }
                    }
                }

            int nThread = Integer.parseInt(getName());
            Gate gate = m_gate;

            spinTime(15*(nIter%2 + 1));
            assertFalse(gate.isEnteredByCurrentThread());

            switch (nThread)
                {
                // thread "0" bars entry and locks successfully:
                // it waits for a decent number of threads to enter
                // the gate, then bars the entry, only once,
                // then tries to close
                case 0:
                    if (m_test.m_fClosed)
                        {
                        // this thread has closed the gate,
                        // it is still allowed enter and barEntry
                        assertTrue(gate.enter(0));
                        gate.exit();
                        assertTrue(gate.isClosedByCurrentThread());
                        assertTrue(gate.barEntry(0));

                        // this open matches barEntry above
                        gate.open();
                        assertTrue(gate.isClosedByCurrentThread());

                        // let the gate stay closed a while
                        if (nIter > NUM_ITERATIONS*2/3)
                            {
                            // this open matches close done by this thread
                            // in previous iteration
                            gate.open();
                            System.out.print("   Thread 0 opened gate, ");

                            // but predicate remains true, b/c all barEntry
                            // calls have not been matched with open calls yet
                            assertTrue(gate.isClosedByCurrentThread());
                            System.out.println("but isClosedByCurrentThread() is true.");

                            spinTime(15);

                            // this open matches the first barEntry, gate is finally open.
                            // Even if thread "1" does barEntry(), isClosed() should be false
                            gate.open();
                            System.out.print("   Thread 0 opened gate, ");
                            assertFalse(gate.isClosed());
                            System.out.println("now isClosedByCurrentThread() is false.");

                            // gate may be already barred by thread "1",
                            // but not by this thread
                            assertFalse(gate.isClosedByCurrentThread());
                            tloFinished.set(true);
                            }
                        }
                    else if (m_test.m_fBarred)
                        {
                        // check that the thread which barred entry is still allowed to enter
                        assertTrue(gate.enter(0));
                        System.out.println("   Thread 0 can still enter gate.");
                        gate.exit();

                        // keep trying to close
                        int nInsideBeforeClose = m_test.m_atomicEntered.get();
                        m_test.m_fClosed = gate.close(0);
                        int nInsideAfterClose = m_test.m_atomicEntered.get();

                        // do the checks if we were lucky
                        if (nInsideBeforeClose == nInsideAfterClose)
                            {
                            assertTrue(m_test.m_fClosed && nInsideBeforeClose == 0
                                    || !m_test.m_fClosed && nInsideBeforeClose > 0);
                            System.out.println("   Thread "+getName()+": checked correctness of Gate.close().");
                            }

                        // at the end of the run wait up to 3 min to succeed
                        if (!m_test.m_fClosed && nIter == NUM_ITERATIONS - 5)
                            {
                            for (int i = 0; i < 180; i++)
                                {
                                if (m_test.m_fClosed = gate.close(1000))
                                    {
                                    break;
                                    }
                                }
                            assertTrue(m_test.m_fClosed);
                            System.out.println("   Thread 0 closed gate.");
                            }
                        }
                    else if (m_test.m_atomicEntered.get() >= NUM_THREADS/3)
                        {
                        // operation should succeed immediately,
                        // because no other thread is doing barEntry or close
                        m_test.m_fBarred = gate.barEntry(0);
                        assertTrue(m_test.m_fBarred);
                        System.out.print("   Thread 0 barred gate, ");

                        // barEntry != close, gate is not "closed" yet
                        assertFalse(gate.isClosedByCurrentThread());
                        assertFalse(gate.isClosed());
                        System.out.println("  but not closed it.");
                        }
                    break;

                // thread "1" tries to bar entry, but fails until the very end.
                case 1:
                    spinTime(15);

                    // on the last iteration wait more than 3 min to succeed
                    if (nIter == NUM_ITERATIONS - 1)
                        {
                        System.out.println("   Thread 1: last iteration.");

                        // this makes sure that thread "0" goes first
                        while (!m_test.m_fBarred)
                            {
                            Base.sleep(100);
                            }

                        boolean fBarred = false;
                        for (int i = 0; i < 200; i++)
                            {
                            if (fBarred = gate.barEntry(1000))
                                {
                                System.out.println("   Thread 1: barred gate.");
                                break;
                                }
                            }
                        assertTrue(fBarred);

                        gate.open();
                        System.out.println("   Thread 1: opened gate.");
                        tloFinished.set(true);
                        }
                    break;

                default:
                    throw ensureRuntimeException(
                        new Exception("BarCloseThread with number " + nThread + "cannot run."));
                }
            }

        // ---- data members ------------------------------------

        // per-thread flag: set once this thread has completed its scenario
        private ThreadLocal<Boolean> tloFinished = new ThreadLocal<Boolean>()
            {
            protected Boolean initialValue()
                {
                return Boolean.FALSE;
                }
            };
        }
    /**
     * Perform a single gate operation and verify its outcome.
     *
     * @param gate        the Gate to operate on
     * @param eOperation  the operation to perform
     * @param fExpect     the expected boolean result (ignored for operations
     *                    that return void, such as OPEN and EXIT)
     * @param fException  true iff the operation is expected to throw
     *
     * @throws Throwable an unexpected failure from the gate operation
     */
    protected static void doOperation(Gate gate, GATE_OPS eOperation,
                                      boolean fExpect, boolean fException)
            throws Throwable
        {
        boolean fResult = false;
        boolean fAssert = false;

        System.out.print(Thread.currentThread());
        try
            {
            switch (eOperation)
                {
                case CLOSE:
                    fResult = gate.close(100);
                    fAssert = true;
                    System.out.println(" CLOSE: expect="+fExpect+", actual="+fResult);
                    break;
                case BAR:
                    fResult = gate.barEntry(100);
                    fAssert = true;
                    System.out.println(" BAR: expect="+fExpect+", actual="+fResult);
                    break;
                case OPEN:
                    System.out.println(" before OPEN, fException=" + fException);
                    gate.open();
                    System.out.println(" after OPEN");
                    break;
                case ENTER:
                    fResult = gate.enter(100);
                    fAssert = true;
                    System.out.println(" ENTER: expect="+fExpect+", actual="+fResult);
                    break;
                case EXIT:
                    System.out.println(" before EXIT");
                    gate.exit();
                    System.out.println(" after EXIT");
                    break;
                case CLOSED:
                    fResult = gate.isClosed();
                    fAssert = true;
                    System.out.println(" CLOSED: expect="+fExpect+", actual="+fResult);
                    break;
                case CLOSED_CUR_THR:
                    fResult = gate.isClosedByCurrentThread();
                    fAssert = true;
                    System.out.println(" CLOSE_CUR_THR: expect="+fExpect+", actual="+fResult);
                    break;
                default:
                    throw new RuntimeException("Runner initialized with unknown gate operation");
                }
            }
        catch (Throwable t)
            {
            if (fException)
                {
                // the operation was expected to fail - nothing more to check
                System.out.println("caught expected exception "+t);
                return;
                }
            else
                {
                System.out.println("==> Unexpected exception! "+t);
                throw t;
                }
            }

        if (fException)
            {
            fail("Exception was expected.");
            }

        // only boolean-returning operations set fAssert
        if (fAssert && fExpect)
            {
            assertTrue(fResult);
            }
        else if (fAssert && !fExpect)
            {
            assertFalse(fResult);
            }
        }
    /**
     * A Runner that waits its turn (determined by its order index in the
     * scenario) and then performs a single gate operation.
     */
    public static class OrderedRunner extends Runner
        {
        /**
         * Construct an OrderedRunner.
         *
         * @param op          the gate operation to perform
         * @param nOrder      this thread's 1-based position in the scenario;
         *                    it also serves as the iteration count, so the
         *                    operation runs on the final iteration
         * @param fExpect     the expected boolean result of the operation
         * @param oMon        the monitor shared by all runners in the scenario
         * @param fException  true iff the operation is expected to throw
         */
        public OrderedRunner(GATE_OPS op, int nOrder, boolean fExpect,
                             Object oMon, boolean fException)
            {
            super(nOrder);
            m_eOperation = op;
            m_nOrder     = nOrder;
            m_oMon       = oMon;
            m_fExpect    = fExpect;
            m_fException = fException;
            }

        /**
         * Wait on the shared monitor until it is this thread's turn, then
         * perform the single gate operation and wake the other runners.
         */
        public void doIteration(int nIter)
                throws Throwable
            {
            assertTrue(nIter < m_nOrder);
            if (nIter < m_nOrder - 1)
                {
                synchronized (m_oMon)
                    {
                    try
                        {
                        Blocking.wait(m_oMon, getMaxRun());
                        }
                    catch (InterruptedException e)
                        {
                        // retry while within the time budget, otherwise fail
                        if (System.currentTimeMillis() - getStart() < getMaxRun())
                            {
                            doIteration(0);
                            }
                        else
                            {
                            fail(getName() + " could not proceed in time allowed.");
                            }
                        }
                    }
                }
            else
                {
                try
                    {
                    doOperation(m_gate, m_eOperation, m_fExpect, m_fException);
                    }
                finally
                    {
                    // signal completion of your step and release waiting Runners
                    synchronized (m_oMon)
                        {
                        m_oMon.notifyAll();
                        }
                    }
                }
            }

        // ---- data members ------------------------------------

        /**
         * The order in a multi-threaded scenario when this thread
         * should execute its operation
         */
        private int m_nOrder;

        /**
         * The Gate operation to be executed by this thread.
         */
        private GATE_OPS m_eOperation;

        /**
         * The external object to synchronize on (cannot use Runner.m_test)
         */
        private Object m_oMon;

        /**
         * The expected result of a boolean gate operation, to be asserted
         * by the thread, ignored for operations returning void.
         */
        private boolean m_fExpect;

        /**
         * True iff operation should throw an exception,
         * usually IllegalMonitorStateException
         */
        private boolean m_fException;
        }
    // ------ Data members ---------------------------------------
    /**
     * Enumeration of gate operations.  Each constant is mapped onto the
     * corresponding Gate method by doOperation (e.g. CLOSE -> Gate.close,
     * CLOSED_CUR_THR -> Gate.isClosedByCurrentThread).
     */
    public enum GATE_OPS {CLOSE, BAR, OPEN, ENTER, EXIT, CLOSED, CLOSED_CUR_THR};
    /**
     * The number of iterations to run each test.
     * Should be an even number.
     */
    public static final int NUM_ITERATIONS = 10;
    /**
     * The number of concurrent test threads.
     */
    public static final int NUM_THREADS = 20;
}
|
googleapis/google-cloud-java | 36,903 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RouteAsPath.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RouteAsPath}
*/
public final class RouteAsPath extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RouteAsPath)
RouteAsPathOrBuilder {
private static final long serialVersionUID = 0L;
// Use RouteAsPath.newBuilder() to construct.
private RouteAsPath(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RouteAsPath() {
asLists_ = emptyIntList();
pathSegmentType_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RouteAsPath();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RouteAsPath_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RouteAsPath_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RouteAsPath.class,
com.google.cloud.compute.v1.RouteAsPath.Builder.class);
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* </pre>
*
* Protobuf enum {@code google.cloud.compute.v1.RouteAsPath.PathSegmentType}
*/
public enum PathSegmentType implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* A value indicating that the enum field is not set.
* </pre>
*
* <code>UNDEFINED_PATH_SEGMENT_TYPE = 0;</code>
*/
UNDEFINED_PATH_SEGMENT_TYPE(0),
/** <code>AS_CONFED_SEQUENCE = 222152624;</code> */
AS_CONFED_SEQUENCE(222152624),
/** <code>AS_CONFED_SET = 374040307;</code> */
AS_CONFED_SET(374040307),
/** <code>AS_SEQUENCE = 106735918;</code> */
AS_SEQUENCE(106735918),
/** <code>AS_SET = 329846453;</code> */
AS_SET(329846453),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* A value indicating that the enum field is not set.
* </pre>
*
* <code>UNDEFINED_PATH_SEGMENT_TYPE = 0;</code>
*/
public static final int UNDEFINED_PATH_SEGMENT_TYPE_VALUE = 0;
/** <code>AS_CONFED_SEQUENCE = 222152624;</code> */
public static final int AS_CONFED_SEQUENCE_VALUE = 222152624;
/** <code>AS_CONFED_SET = 374040307;</code> */
public static final int AS_CONFED_SET_VALUE = 374040307;
/** <code>AS_SEQUENCE = 106735918;</code> */
public static final int AS_SEQUENCE_VALUE = 106735918;
/** <code>AS_SET = 329846453;</code> */
public static final int AS_SET_VALUE = 329846453;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static PathSegmentType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static PathSegmentType forNumber(int value) {
switch (value) {
case 0:
return UNDEFINED_PATH_SEGMENT_TYPE;
case 222152624:
return AS_CONFED_SEQUENCE;
case 374040307:
return AS_CONFED_SET;
case 106735918:
return AS_SEQUENCE;
case 329846453:
return AS_SET;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<PathSegmentType> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<PathSegmentType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<PathSegmentType>() {
public PathSegmentType findValueByNumber(int number) {
return PathSegmentType.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.compute.v1.RouteAsPath.getDescriptor().getEnumTypes().get(0);
}
private static final PathSegmentType[] VALUES = values();
public static PathSegmentType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private PathSegmentType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.compute.v1.RouteAsPath.PathSegmentType)
}
private int bitField0_;
public static final int AS_LISTS_FIELD_NUMBER = 134112584;
@SuppressWarnings("serial")
private com.google.protobuf.Internal.IntList asLists_ = emptyIntList();
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @return A list containing the asLists.
*/
@java.lang.Override
public java.util.List<java.lang.Integer> getAsListsList() {
return asLists_;
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @return The count of asLists.
*/
public int getAsListsCount() {
return asLists_.size();
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @param index The index of the element to return.
* @return The asLists at the given index.
*/
public int getAsLists(int index) {
return asLists_.getInt(index);
}
private int asListsMemoizedSerializedSize = -1;
public static final int PATH_SEGMENT_TYPE_FIELD_NUMBER = 513464992;
@SuppressWarnings("serial")
private volatile java.lang.Object pathSegmentType_ = "";
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @return Whether the pathSegmentType field is set.
*/
@java.lang.Override
public boolean hasPathSegmentType() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @return The pathSegmentType.
*/
@java.lang.Override
public java.lang.String getPathSegmentType() {
java.lang.Object ref = pathSegmentType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pathSegmentType_ = s;
return s;
}
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @return The bytes for pathSegmentType.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPathSegmentTypeBytes() {
java.lang.Object ref = pathSegmentType_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pathSegmentType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
getSerializedSize();
if (getAsListsList().size() > 0) {
output.writeUInt32NoTag(1072900674);
output.writeUInt32NoTag(asListsMemoizedSerializedSize);
}
for (int i = 0; i < asLists_.size(); i++) {
output.writeUInt32NoTag(asLists_.getInt(i));
}
if (((bitField0_ & 0x00000001) != 0)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 513464992, pathSegmentType_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
{
int dataSize = 0;
for (int i = 0; i < asLists_.size(); i++) {
dataSize +=
com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(asLists_.getInt(i));
}
size += dataSize;
if (!getAsListsList().isEmpty()) {
size += 5;
size += com.google.protobuf.CodedOutputStream.computeInt32SizeNoTag(dataSize);
}
asListsMemoizedSerializedSize = dataSize;
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(513464992, pathSegmentType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.compute.v1.RouteAsPath)) {
return super.equals(obj);
}
com.google.cloud.compute.v1.RouteAsPath other = (com.google.cloud.compute.v1.RouteAsPath) obj;
if (!getAsListsList().equals(other.getAsListsList())) return false;
if (hasPathSegmentType() != other.hasPathSegmentType()) return false;
if (hasPathSegmentType()) {
if (!getPathSegmentType().equals(other.getPathSegmentType())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAsListsCount() > 0) {
hash = (37 * hash) + AS_LISTS_FIELD_NUMBER;
hash = (53 * hash) + getAsListsList().hashCode();
}
if (hasPathSegmentType()) {
hash = (37 * hash) + PATH_SEGMENT_TYPE_FIELD_NUMBER;
hash = (53 * hash) + getPathSegmentType().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RouteAsPath parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RouteAsPath parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RouteAsPath parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.compute.v1.RouteAsPath prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RouteAsPath}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RouteAsPath)
com.google.cloud.compute.v1.RouteAsPathOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RouteAsPath_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RouteAsPath_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.RouteAsPath.class,
com.google.cloud.compute.v1.RouteAsPath.Builder.class);
}
// Construct using com.google.cloud.compute.v1.RouteAsPath.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
asLists_ = emptyIntList();
pathSegmentType_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_RouteAsPath_descriptor;
}
@java.lang.Override
public com.google.cloud.compute.v1.RouteAsPath getDefaultInstanceForType() {
return com.google.cloud.compute.v1.RouteAsPath.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.compute.v1.RouteAsPath build() {
com.google.cloud.compute.v1.RouteAsPath result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.compute.v1.RouteAsPath buildPartial() {
com.google.cloud.compute.v1.RouteAsPath result =
new com.google.cloud.compute.v1.RouteAsPath(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.compute.v1.RouteAsPath result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
asLists_.makeImmutable();
result.asLists_ = asLists_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pathSegmentType_ = pathSegmentType_;
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.compute.v1.RouteAsPath) {
return mergeFrom((com.google.cloud.compute.v1.RouteAsPath) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.compute.v1.RouteAsPath other) {
if (other == com.google.cloud.compute.v1.RouteAsPath.getDefaultInstance()) return this;
if (!other.asLists_.isEmpty()) {
if (asLists_.isEmpty()) {
asLists_ = other.asLists_;
asLists_.makeImmutable();
bitField0_ |= 0x00000001;
} else {
ensureAsListsIsMutable();
asLists_.addAll(other.asLists_);
}
onChanged();
}
if (other.hasPathSegmentType()) {
pathSegmentType_ = other.pathSegmentType_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 1072900672:
{
int v = input.readUInt32();
ensureAsListsIsMutable();
asLists_.addInt(v);
break;
} // case 1072900672
case 1072900674:
{
int length = input.readRawVarint32();
int limit = input.pushLimit(length);
ensureAsListsIsMutable();
while (input.getBytesUntilLimit() > 0) {
asLists_.addInt(input.readUInt32());
}
input.popLimit(limit);
break;
} // case 1072900674
case -187247358:
{
pathSegmentType_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case -187247358
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.Internal.IntList asLists_ = emptyIntList();
private void ensureAsListsIsMutable() {
if (!asLists_.isModifiable()) {
asLists_ = makeMutableCopy(asLists_);
}
bitField0_ |= 0x00000001;
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @return A list containing the asLists.
*/
public java.util.List<java.lang.Integer> getAsListsList() {
asLists_.makeImmutable();
return asLists_;
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @return The count of asLists.
*/
public int getAsListsCount() {
return asLists_.size();
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @param index The index of the element to return.
* @return The asLists at the given index.
*/
public int getAsLists(int index) {
return asLists_.getInt(index);
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @param index The index to set the value at.
* @param value The asLists to set.
* @return This builder for chaining.
*/
public Builder setAsLists(int index, int value) {
ensureAsListsIsMutable();
asLists_.setInt(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @param value The asLists to add.
* @return This builder for chaining.
*/
public Builder addAsLists(int value) {
ensureAsListsIsMutable();
asLists_.addInt(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @param values The asLists to add.
* @return This builder for chaining.
*/
public Builder addAllAsLists(java.lang.Iterable<? extends java.lang.Integer> values) {
ensureAsListsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, asLists_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* [Output Only] The AS numbers of the AS Path.
* </pre>
*
* <code>repeated uint32 as_lists = 134112584;</code>
*
* @return This builder for chaining.
*/
public Builder clearAsLists() {
asLists_ = emptyIntList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
private java.lang.Object pathSegmentType_ = "";
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @return Whether the pathSegmentType field is set.
*/
public boolean hasPathSegmentType() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @return The pathSegmentType.
*/
public java.lang.String getPathSegmentType() {
java.lang.Object ref = pathSegmentType_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pathSegmentType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @return The bytes for pathSegmentType.
*/
public com.google.protobuf.ByteString getPathSegmentTypeBytes() {
java.lang.Object ref = pathSegmentType_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pathSegmentType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @param value The pathSegmentType to set.
* @return This builder for chaining.
*/
public Builder setPathSegmentType(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pathSegmentType_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @return This builder for chaining.
*/
public Builder clearPathSegmentType() {
pathSegmentType_ = getDefaultInstance().getPathSegmentType();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* [Output Only] The type of the AS Path, which can be one of the following values: - 'AS_SET': unordered set of autonomous systems that the route in has traversed - 'AS_SEQUENCE': ordered set of autonomous systems that the route has traversed - 'AS_CONFED_SEQUENCE': ordered set of Member Autonomous Systems in the local confederation that the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed
* Check the PathSegmentType enum for the list of possible values.
* </pre>
*
* <code>optional string path_segment_type = 513464992;</code>
*
* @param value The bytes for pathSegmentType to set.
* @return This builder for chaining.
*/
public Builder setPathSegmentTypeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pathSegmentType_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RouteAsPath)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RouteAsPath)
// Singleton default (all-fields-unset) instance of RouteAsPath, created eagerly
// at class-initialization time.
private static final com.google.cloud.compute.v1.RouteAsPath DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.RouteAsPath();
}
// Returns the shared immutable default instance.
public static com.google.cloud.compute.v1.RouteAsPath getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Singleton wire-format parser for RouteAsPath. On any failure it attaches the
// partially built message to the thrown exception so callers can inspect what
// was successfully parsed before the error.
private static final com.google.protobuf.Parser<RouteAsPath> PARSER =
    new com.google.protobuf.AbstractParser<RouteAsPath>() {
      @java.lang.Override
      public RouteAsPath parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Re-throw as-is, augmented with the partial message.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Convert missing-required-field failures to the protobuf exception type.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf-specific exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<RouteAsPath> parser() {
  return PARSER;
}
// Instance accessor for the shared parser, required by the Message contract.
@java.lang.Override
public com.google.protobuf.Parser<RouteAsPath> getParserForType() {
  return PARSER;
}
// Instance accessor for the shared default instance, required by the Message contract.
@java.lang.Override
public com.google.cloud.compute.v1.RouteAsPath getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,996 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/RegionTargetTcpProxiesClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.longrunning.OperationFuture;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.stub.RegionTargetTcpProxiesStub;
import com.google.cloud.compute.v1.stub.RegionTargetTcpProxiesStubSettings;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: The RegionTargetTcpProxies API.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* String project = "project-309310695";
* String region = "region-934795532";
* String targetTcpProxy = "targetTcpProxy-337144898";
* TargetTcpProxy response = regionTargetTcpProxiesClient.get(project, region, targetTcpProxy);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the RegionTargetTcpProxiesClient object to clean up
* resources such as threads. In the example above, try-with-resources is used, which automatically
* calls close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> Delete</td>
* <td><p> Deletes the specified TargetTcpProxy resource.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> deleteAsync(DeleteRegionTargetTcpProxyRequest request)
* </ul>
* <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p>
* <ul>
* <li><p> deleteAsync(String project, String region, String targetTcpProxy)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> deleteOperationCallable()
* <li><p> deleteCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> Get</td>
* <td><p> Returns the specified TargetTcpProxy resource.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> get(GetRegionTargetTcpProxyRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> get(String project, String region, String targetTcpProxy)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> Insert</td>
* <td><p> Creates a TargetTcpProxy resource in the specified project and region using the data included in the request.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> insertAsync(InsertRegionTargetTcpProxyRequest request)
* </ul>
* <p>Methods that return long-running operations have "Async" method variants that return `OperationFuture`, which is used to track polling of the service.</p>
* <ul>
* <li><p> insertAsync(String project, String region, TargetTcpProxy targetTcpProxyResource)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> insertOperationCallable()
* <li><p> insertCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> List</td>
* <td><p> Retrieves a list of TargetTcpProxy resources available to the specified project in a given region.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> list(ListRegionTargetTcpProxiesRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> list(String project, String region)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listPagedCallable()
* <li><p> listCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of RegionTargetTcpProxiesSettings
* to create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* RegionTargetTcpProxiesSettings regionTargetTcpProxiesSettings =
* RegionTargetTcpProxiesSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create(regionTargetTcpProxiesSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* RegionTargetTcpProxiesSettings regionTargetTcpProxiesSettings =
* RegionTargetTcpProxiesSettings.newBuilder().setEndpoint(myEndpoint).build();
* RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create(regionTargetTcpProxiesSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class RegionTargetTcpProxiesClient implements BackgroundResource {
private final RegionTargetTcpProxiesSettings settings;
private final RegionTargetTcpProxiesStub stub;
/** Constructs an instance of RegionTargetTcpProxiesClient with default settings. */
public static final RegionTargetTcpProxiesClient create() throws IOException {
  // Build settings with all defaults, then hand off to the settings-based factory.
  RegionTargetTcpProxiesSettings defaultSettings =
      RegionTargetTcpProxiesSettings.newBuilder().build();
  return create(defaultSettings);
}
/**
* Constructs an instance of RegionTargetTcpProxiesClient, using the given settings. The channels
* are created based on the settings passed in, or defaults for any settings that are not set.
*/
// Factory taking explicit settings; transport channels are derived from them.
public static final RegionTargetTcpProxiesClient create(RegionTargetTcpProxiesSettings settings)
    throws IOException {
  return new RegionTargetTcpProxiesClient(settings);
}
/**
 * Constructs an instance of RegionTargetTcpProxiesClient, using the given stub for making calls.
 * This is for advanced usage - prefer using create(RegionTargetTcpProxiesSettings).
 */
public static final RegionTargetTcpProxiesClient create(RegionTargetTcpProxiesStub stub) {
  return new RegionTargetTcpProxiesClient(stub);
}
/**
* Constructs an instance of RegionTargetTcpProxiesClient, using the given settings. This is
* protected so that it is easy to make a subclass, but otherwise, the static factory methods
* should be preferred.
*/
// Settings-based constructor: materializes the transport stub from the settings.
protected RegionTargetTcpProxiesClient(RegionTargetTcpProxiesSettings settings)
    throws IOException {
  this.settings = settings;
  // The public settings wrap a stub-settings object; the cast recovers it to build the stub.
  this.stub = ((RegionTargetTcpProxiesStubSettings) settings.getStubSettings()).createStub();
}
// Stub-based constructor for advanced usage; no settings object is available (null).
protected RegionTargetTcpProxiesClient(RegionTargetTcpProxiesStub stub) {
  this.settings = null;
  this.stub = stub;
}
// Returns the settings this client was created with; null when built from a raw stub.
public final RegionTargetTcpProxiesSettings getSettings() {
  return settings;
}
// Returns the underlying transport stub.
public RegionTargetTcpProxiesStub getStub() {
  return stub;
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes the specified TargetTcpProxy resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* String project = "project-309310695";
* String region = "region-934795532";
* String targetTcpProxy = "targetTcpProxy-337144898";
* Operation response =
* regionTargetTcpProxiesClient.deleteAsync(project, region, targetTcpProxy).get();
* }
* }</pre>
*
* @param project Project ID for this request.
* @param region Name of the region scoping this request.
* @param targetTcpProxy Name of the TargetTcpProxy resource to delete.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<Operation, Operation> deleteAsync(
    String project, String region, String targetTcpProxy) {
  // Assemble the request from the flattened parameters, then delegate to the
  // request-object overload.
  DeleteRegionTargetTcpProxyRequest.Builder requestBuilder =
      DeleteRegionTargetTcpProxyRequest.newBuilder();
  requestBuilder.setProject(project);
  requestBuilder.setRegion(region);
  requestBuilder.setTargetTcpProxy(targetTcpProxy);
  return deleteAsync(requestBuilder.build());
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes the specified TargetTcpProxy resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* DeleteRegionTargetTcpProxyRequest request =
* DeleteRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setRequestId("requestId693933066")
* .setTargetTcpProxy("targetTcpProxy-337144898")
* .build();
* Operation response = regionTargetTcpProxiesClient.deleteAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
// Starts the long-running delete operation via the operation callable.
public final OperationFuture<Operation, Operation> deleteAsync(
    DeleteRegionTargetTcpProxyRequest request) {
  return deleteOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes the specified TargetTcpProxy resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* DeleteRegionTargetTcpProxyRequest request =
* DeleteRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setRequestId("requestId693933066")
* .setTargetTcpProxy("targetTcpProxy-337144898")
* .build();
* OperationFuture<Operation, Operation> future =
* regionTargetTcpProxiesClient.deleteOperationCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
// Exposes the stub's long-running-operation callable for delete (supports polling).
public final OperationCallable<DeleteRegionTargetTcpProxyRequest, Operation, Operation>
    deleteOperationCallable() {
  return stub.deleteOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Deletes the specified TargetTcpProxy resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* DeleteRegionTargetTcpProxyRequest request =
* DeleteRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setRequestId("requestId693933066")
* .setTargetTcpProxy("targetTcpProxy-337144898")
* .build();
* ApiFuture<Operation> future =
* regionTargetTcpProxiesClient.deleteCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
// Exposes the stub's plain unary callable for delete (no LRO polling wrapper).
public final UnaryCallable<DeleteRegionTargetTcpProxyRequest, Operation> deleteCallable() {
  return stub.deleteCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the specified TargetTcpProxy resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* String project = "project-309310695";
* String region = "region-934795532";
* String targetTcpProxy = "targetTcpProxy-337144898";
* TargetTcpProxy response = regionTargetTcpProxiesClient.get(project, region, targetTcpProxy);
* }
* }</pre>
*
* @param project Project ID for this request.
* @param region Name of the region scoping this request.
* @param targetTcpProxy Name of the TargetTcpProxy resource to return.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final TargetTcpProxy get(String project, String region, String targetTcpProxy) {
  // Build the request from the flattened parameters and forward to the
  // request-object overload.
  GetRegionTargetTcpProxyRequest.Builder requestBuilder =
      GetRegionTargetTcpProxyRequest.newBuilder();
  requestBuilder.setProject(project);
  requestBuilder.setRegion(region);
  requestBuilder.setTargetTcpProxy(targetTcpProxy);
  return get(requestBuilder.build());
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the specified TargetTcpProxy resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* GetRegionTargetTcpProxyRequest request =
* GetRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setTargetTcpProxy("targetTcpProxy-337144898")
* .build();
* TargetTcpProxy response = regionTargetTcpProxiesClient.get(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
// Synchronous get: blocks on the unary callable and returns the resource.
public final TargetTcpProxy get(GetRegionTargetTcpProxyRequest request) {
  return getCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Returns the specified TargetTcpProxy resource.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* GetRegionTargetTcpProxyRequest request =
* GetRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setTargetTcpProxy("targetTcpProxy-337144898")
* .build();
* ApiFuture<TargetTcpProxy> future =
* regionTargetTcpProxiesClient.getCallable().futureCall(request);
* // Do something.
* TargetTcpProxy response = future.get();
* }
* }</pre>
*/
// Exposes the stub's unary callable for get.
public final UnaryCallable<GetRegionTargetTcpProxyRequest, TargetTcpProxy> getCallable() {
  return stub.getCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a TargetTcpProxy resource in the specified project and region using the data included
* in the request.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* String project = "project-309310695";
* String region = "region-934795532";
* TargetTcpProxy targetTcpProxyResource = TargetTcpProxy.newBuilder().build();
* Operation response =
* regionTargetTcpProxiesClient.insertAsync(project, region, targetTcpProxyResource).get();
* }
* }</pre>
*
* @param project Project ID for this request.
* @param region Name of the region scoping this request.
* @param targetTcpProxyResource The body resource for this request
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final OperationFuture<Operation, Operation> insertAsync(
    String project, String region, TargetTcpProxy targetTcpProxyResource) {
  // Assemble the request from the flattened parameters, then delegate to the
  // request-object overload.
  InsertRegionTargetTcpProxyRequest.Builder requestBuilder =
      InsertRegionTargetTcpProxyRequest.newBuilder();
  requestBuilder.setProject(project);
  requestBuilder.setRegion(region);
  requestBuilder.setTargetTcpProxyResource(targetTcpProxyResource);
  return insertAsync(requestBuilder.build());
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a TargetTcpProxy resource in the specified project and region using the data included
* in the request.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* InsertRegionTargetTcpProxyRequest request =
* InsertRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setRequestId("requestId693933066")
* .setTargetTcpProxyResource(TargetTcpProxy.newBuilder().build())
* .build();
* Operation response = regionTargetTcpProxiesClient.insertAsync(request).get();
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
// Starts the long-running insert operation via the operation callable.
public final OperationFuture<Operation, Operation> insertAsync(
    InsertRegionTargetTcpProxyRequest request) {
  return insertOperationCallable().futureCall(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a TargetTcpProxy resource in the specified project and region using the data included
* in the request.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* InsertRegionTargetTcpProxyRequest request =
* InsertRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setRequestId("requestId693933066")
* .setTargetTcpProxyResource(TargetTcpProxy.newBuilder().build())
* .build();
* OperationFuture<Operation, Operation> future =
* regionTargetTcpProxiesClient.insertOperationCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
// Exposes the stub's long-running-operation callable for insert (supports polling).
public final OperationCallable<InsertRegionTargetTcpProxyRequest, Operation, Operation>
    insertOperationCallable() {
  return stub.insertOperationCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a TargetTcpProxy resource in the specified project and region using the data included
* in the request.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* InsertRegionTargetTcpProxyRequest request =
* InsertRegionTargetTcpProxyRequest.newBuilder()
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setRequestId("requestId693933066")
* .setTargetTcpProxyResource(TargetTcpProxy.newBuilder().build())
* .build();
* ApiFuture<Operation> future =
* regionTargetTcpProxiesClient.insertCallable().futureCall(request);
* // Do something.
* Operation response = future.get();
* }
* }</pre>
*/
// Exposes the stub's plain unary callable for insert (no LRO polling wrapper).
public final UnaryCallable<InsertRegionTargetTcpProxyRequest, Operation> insertCallable() {
  return stub.insertCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves a list of TargetTcpProxy resources available to the specified project in a given
* region.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* String project = "project-309310695";
* String region = "region-934795532";
* for (TargetTcpProxy element :
* regionTargetTcpProxiesClient.list(project, region).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param project Project ID for this request.
* @param region Name of the region scoping this request.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListPagedResponse list(String project, String region) {
  // Build the request from the flattened parameters and forward to the
  // request-object overload.
  ListRegionTargetTcpProxiesRequest.Builder requestBuilder =
      ListRegionTargetTcpProxiesRequest.newBuilder();
  requestBuilder.setProject(project);
  requestBuilder.setRegion(region);
  return list(requestBuilder.build());
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves a list of TargetTcpProxy resources available to the specified project in a given
* region.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* ListRegionTargetTcpProxiesRequest request =
* ListRegionTargetTcpProxiesRequest.newBuilder()
* .setFilter("filter-1274492040")
* .setMaxResults(1128457243)
* .setOrderBy("orderBy-1207110587")
* .setPageToken("pageToken873572522")
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setReturnPartialSuccess(true)
* .build();
* for (TargetTcpProxy element : regionTargetTcpProxiesClient.list(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
// Synchronous list: blocks on the paged callable; returned response lazily
// fetches subsequent pages via iterateAll().
public final ListPagedResponse list(ListRegionTargetTcpProxiesRequest request) {
  return listPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves a list of TargetTcpProxy resources available to the specified project in a given
* region.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
* RegionTargetTcpProxiesClient.create()) {
* ListRegionTargetTcpProxiesRequest request =
* ListRegionTargetTcpProxiesRequest.newBuilder()
* .setFilter("filter-1274492040")
* .setMaxResults(1128457243)
* .setOrderBy("orderBy-1207110587")
* .setPageToken("pageToken873572522")
* .setProject("project-309310695")
* .setRegion("region-934795532")
* .setReturnPartialSuccess(true)
* .build();
* ApiFuture<TargetTcpProxy> future =
* regionTargetTcpProxiesClient.listPagedCallable().futureCall(request);
* // Do something.
* for (TargetTcpProxy element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
// Exposes the stub's page-aware list callable (handles page tokens automatically).
public final UnaryCallable<ListRegionTargetTcpProxiesRequest, ListPagedResponse>
    listPagedCallable() {
  return stub.listPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
 * Retrieves a list of TargetTcpProxy resources available to the specified project in a given
 * region.
 *
 * <p>Sample code:
 *
 * <pre>{@code
 * // This snippet has been automatically generated and should be regarded as a code template only.
 * // It will require modifications to work:
 * // - It may require correct/in-range values for request initialization.
 * // - It may require specifying regional endpoints when creating the service client as shown in
 * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
 * try (RegionTargetTcpProxiesClient regionTargetTcpProxiesClient =
 *     RegionTargetTcpProxiesClient.create()) {
 *   ListRegionTargetTcpProxiesRequest request =
 *       ListRegionTargetTcpProxiesRequest.newBuilder()
 *           .setFilter("filter-1274492040")
 *           .setMaxResults(1128457243)
 *           .setOrderBy("orderBy-1207110587")
 *           .setPageToken("pageToken873572522")
 *           .setProject("project-309310695")
 *           .setRegion("region-934795532")
 *           .setReturnPartialSuccess(true)
 *           .build();
 *   while (true) {
 *     TargetTcpProxyList response = regionTargetTcpProxiesClient.listCallable().call(request);
 *     for (TargetTcpProxy element : response.getItemsList()) {
 *       // doThingsWith(element);
 *     }
 *     String nextPageToken = response.getNextPageToken();
 *     if (!Strings.isNullOrEmpty(nextPageToken)) {
 *       request = request.toBuilder().setPageToken(nextPageToken).build();
 *     } else {
 *       break;
 *     }
 *   }
 * }
 * }</pre>
 */
public final UnaryCallable<ListRegionTargetTcpProxiesRequest, TargetTcpProxyList> listCallable() {
// Raw (non-paged) variant; callers drive pagination manually via nextPageToken.
return stub.listCallable();
}
// Closes the underlying transport stub; the client is unusable afterwards.
@Override
public final void close() {
stub.close();
}
// Initiates an orderly shutdown of the underlying stub (delegation only).
@Override
public void shutdown() {
stub.shutdown();
}
// Reports whether shutdown has been initiated on the underlying stub.
@Override
public boolean isShutdown() {
return stub.isShutdown();
}
// Reports whether the underlying stub has fully terminated.
@Override
public boolean isTerminated() {
return stub.isTerminated();
}
// Forces an immediate shutdown of the underlying stub.
@Override
public void shutdownNow() {
stub.shutdownNow();
}
// Blocks up to the given duration for the stub to terminate; propagates interruption.
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return stub.awaitTermination(duration, unit);
}
/** Paged response wrapper for {@code list}, produced by the generated pagination machinery. */
public static class ListPagedResponse
    extends AbstractPagedListResponse<
        ListRegionTargetTcpProxiesRequest,
        TargetTcpProxyList,
        TargetTcpProxy,
        ListPage,
        ListFixedSizeCollection> {

  /**
   * Asynchronously wraps the first page of results into a {@link ListPagedResponse}.
   *
   * @param context pagination context tying the request to its transport call
   * @param futureResponse future holding the raw first-page response
   * @return future completing with the assembled paged response
   */
  public static ApiFuture<ListPagedResponse> createAsync(
      PageContext<ListRegionTargetTcpProxiesRequest, TargetTcpProxyList, TargetTcpProxy> context,
      ApiFuture<TargetTcpProxyList> futureResponse) {
    ApiFuture<ListPage> futurePage =
        ListPage.createEmptyPage().createPageAsync(context, futureResponse);
    // Constructor reference is the idiomatic form of `input -> new ListPagedResponse(input)`.
    return ApiFutures.transform(
        futurePage, ListPagedResponse::new, MoreExecutors.directExecutor());
  }

  private ListPagedResponse(ListPage page) {
    super(page, ListFixedSizeCollection.createEmptyCollection());
  }
}
/** Single page of {@code list} results; generated pagination plumbing. */
public static class ListPage
extends AbstractPage<
ListRegionTargetTcpProxiesRequest, TargetTcpProxyList, TargetTcpProxy, ListPage> {
private ListPage(
PageContext<ListRegionTargetTcpProxiesRequest, TargetTcpProxyList, TargetTcpProxy> context,
TargetTcpProxyList response) {
super(context, response);
}
// Sentinel page used as a factory seed before any RPC has been issued.
private static ListPage createEmptyPage() {
return new ListPage(null, null);
}
@Override
protected ListPage createPage(
PageContext<ListRegionTargetTcpProxiesRequest, TargetTcpProxyList, TargetTcpProxy> context,
TargetTcpProxyList response) {
return new ListPage(context, response);
}
@Override
public ApiFuture<ListPage> createPageAsync(
PageContext<ListRegionTargetTcpProxiesRequest, TargetTcpProxyList, TargetTcpProxy> context,
ApiFuture<TargetTcpProxyList> futureResponse) {
return super.createPageAsync(context, futureResponse);
}
}
/** Fixed-size grouping of {@code list} results; generated pagination plumbing. */
public static class ListFixedSizeCollection
extends AbstractFixedSizeCollection<
ListRegionTargetTcpProxiesRequest,
TargetTcpProxyList,
TargetTcpProxy,
ListPage,
ListFixedSizeCollection> {
private ListFixedSizeCollection(List<ListPage> pages, int collectionSize) {
super(pages, collectionSize);
}
// Empty sentinel used when constructing the initial paged response.
private static ListFixedSizeCollection createEmptyCollection() {
return new ListFixedSizeCollection(null, 0);
}
@Override
protected ListFixedSizeCollection createCollection(List<ListPage> pages, int collectionSize) {
return new ListFixedSizeCollection(pages, collectionSize);
}
}
}
// ==== file boundary (concatenation artifact): apache/zookeeper —
// zookeeper-server/src/test/java/org/apache/zookeeper/common/CertificatesToPlayWith.java ====
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.common;
/**
* Some X509 certificates to test against.
* <p>
* Note: some of these certificates have Japanese Kanji in the "subjectAlt"
* field (UTF8). Not sure how realistic that is since international characters
* in DNS names usually get translated into ASCII using "xn--" style DNS
* entries. "xn--i8s592g.co.jp" is what FireFox actually uses when trying to
* find 花子.co.jp. So would the CN in the certificate contain
* "xn--i8s592g.co.jp" in ASCII, or "花子.co.jp" in UTF8? (Both?)
* </p>
*
* @since 11-Dec-2006
*/
public class CertificatesToPlayWith {
/**
 * CN=foo.com
 *
 * <p>PEM-encoded X.509 test certificate with no subjectAltName. NOTE(review):
 * {@code String.getBytes()} uses the platform default charset; the PEM text is
 * pure ASCII so this is safe on ASCII-compatible platforms, but an explicit
 * charset would be more robust — applies to every constant in this class.
 */
public static final byte[] X509_FOO = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIERjCCAy6gAwIBAgIJAIz+EYMBU6aQMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1MzE0MVoXDTI4MTEwNTE1MzE0MVowgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxMHZm9vLmNvbTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgB\n"
+ "hvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYE\n"
+ "FJ8Ud78/OrbKOIJCSBYs2tDLXofYMB8GA1UdIwQYMBaAFHua2o+QmU5S0qzbswNS\n"
+ "yoemDT4NMA0GCSqGSIb3DQEBBQUAA4IBAQC3jRmEya6sQCkmieULcvx8zz1euCk9\n"
+ "fSez7BEtki8+dmfMXe3K7sH0lI8f4jJR0rbSCjpmCQLYmzC3NxBKeJOW0RcjNBpO\n"
+ "c2JlGO9auXv2GDP4IYiXElLJ6VSqc8WvDikv0JmCCWm0Zga+bZbR/EWN5DeEtFdF\n"
+ "815CLpJZNcYwiYwGy/CVQ7w2TnXlG+mraZOz+owr+cL6J/ZesbdEWfjoS1+cUEhE\n"
+ "HwlNrAu8jlZ2UqSgskSWlhYdMTAP9CPHiUv9N7FcT58Itv/I4fKREINQYjDpvQcx\n"
+ "SaTYb9dr5sB4WLNglk7zxDtM80H518VvihTcP7FHL+Gn6g4j5fkI98+S\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
 * CN=花子.co.jp
 *
 * <p>PEM-encoded test certificate whose CN contains non-ASCII (UTF-8 kanji) characters.
 */
public static final byte[] X509_HANAKO = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIESzCCAzOgAwIBAgIJAIz+EYMBU6aTMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1NDIxNVoXDTI4MTEwNTE1NDIxNVowgakx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIDAhNYXJ5bGFuZDEUMBIGA1UEBwwLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoMDmh0dHBjb21wb25lbnRzMRowGAYDVQQLDBF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEVMBMGA1UEAwwM6Iqx5a2QLmNvLmpwMSUwIwYJKoZIhvcNAQkB\n"
+ "FhZqdWxpdXNkYXZpZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A\n"
+ "MIIBCgKCAQEAyGOvloI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjU\n"
+ "g4pNjYGViGjg7zhfbjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQc\n"
+ "wHf0ZHLN6sD9m2uVSp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t\n"
+ "7iu1JVjTuE0pcBvah2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAn\n"
+ "AxK6q/wGqcZ3zvFBTcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArD\n"
+ "qUYxqJUlPGlMqrKb3fCFiT3eXehwR7nlzQIDAQABo3sweTAJBgNVHRMEAjAAMCwG\n"
+ "CWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNV\n"
+ "HQ4EFgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLS\n"
+ "rNuzA1LKh6YNPg0wDQYJKoZIhvcNAQEFBQADggEBALJ27i3okV/KvlDp6KMID3gd\n"
+ "ITl68PyItzzx+SquF8gahMh016NX73z/oVZoVUNdftla8wPUB1GwIkAnGkhQ9LHK\n"
+ "spBdbRiCj0gMmLCsX8SrjFvr7cYb2cK6J/fJe92l1tg/7Y4o7V/s4JBe/cy9U9w8\n"
+ "a0ctuDmEBCgC784JMDtT67klRfr/2LlqWhlOEq7pUFxRLbhpquaAHSOjmIcWnVpw\n"
+ "9BsO7qe46hidgn39hKh1WjKK2VcL/3YRsC4wUi0PBtFW6ScMCuMhgIRXSPU55Rae\n"
+ "UIlOdPjjr1SUNWGId1rD7W16Scpwnknn310FNxFMHVI0GTGFkNdkilNCFJcIoRA=\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
 * CN=foo.com, subjectAlt=bar.com
 *
 * <p>PEM-encoded test certificate whose subjectAltName differs from its CN.
 */
public static final byte[] X509_FOO_BAR = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIEXDCCA0SgAwIBAgIJAIz+EYMBU6aRMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1MzYyOVoXDTI4MTEwNTE1MzYyOVowgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxMHZm9vLmNvbTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaOBkDCBjTAJBgNVHRMEAjAAMCwGCWCG\n"
+ "SAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4E\n"
+ "FgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuz\n"
+ "A1LKh6YNPg0wEgYDVR0RBAswCYIHYmFyLmNvbTANBgkqhkiG9w0BAQUFAAOCAQEA\n"
+ "dQyprNZBmVnvuVWjV42sey/PTfkYShJwy1j0/jcFZR/ypZUovpiHGDO1DgL3Y3IP\n"
+ "zVQ26uhUsSw6G0gGRiaBDe/0LUclXZoJzXX1qpS55OadxW73brziS0sxRgGrZE/d\n"
+ "3g5kkio6IED47OP6wYnlmZ7EKP9cqjWwlnvHnnUcZ2SscoLNYs9rN9ccp8tuq2by\n"
+ "88OyhKwGjJfhOudqfTNZcDzRHx4Fzm7UsVaycVw4uDmhEHJrAsmMPpj/+XRK9/42\n"
+ "2xq+8bc6HojdtbCyug/fvBZvZqQXSmU8m8IVcMmWMz0ZQO8ee3QkBHMZfCy7P/kr\n"
+ "VbWx/uETImUu+NZg22ewEw==\n" + "-----END CERTIFICATE-----\n").getBytes();
/**
 * CN=foo.com, subjectAlt=bar.com, subjectAlt=花子.co.jp
 * (hanako.co.jp in kanji)
 *
 * <p>PEM-encoded test certificate mixing ASCII and non-ASCII subjectAltName entries.
 */
public static final byte[] X509_FOO_BAR_HANAKO = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIEajCCA1KgAwIBAgIJAIz+EYMBU6aSMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE1MzgxM1oXDTI4MTEwNTE1MzgxM1owgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxMHZm9vLmNvbTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaOBnjCBmzAJBgNVHRMEAjAAMCwGCWCG\n"
+ "SAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4E\n"
+ "FgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuz\n"
+ "A1LKh6YNPg0wIAYDVR0RBBkwF4IHYmFyLmNvbYIM6Iqx5a2QLmNvLmpwMA0GCSqG\n"
+ "SIb3DQEBBQUAA4IBAQBeZs7ZIYyKtdnVxVvdLgwySEPOE4pBSXii7XYv0Q9QUvG/\n"
+ "++gFGQh89HhABzA1mVUjH5dJTQqSLFvRfqTHqLpxSxSWqMHnvRM4cPBkIRp/XlMK\n"
+ "PlXadYtJLPTgpbgvulA1ickC9EwlNYWnowZ4uxnfsMghW4HskBqaV+PnQ8Zvy3L0\n"
+ "12c7Cg4mKKS5pb1HdRuiD2opZ+Hc77gRQLvtWNS8jQvd/iTbh6fuvTKfAOFoXw22\n"
+ "sWIKHYrmhCIRshUNohGXv50m2o+1w9oWmQ6Dkq7lCjfXfUB4wIbggJjpyEtbNqBt\n"
+ "j4MC2x5rfsLKKqToKmNE7pFEgqwe8//Aar1b+Qj+\n" + "-----END CERTIFICATE-----\n").getBytes();
/**
 * CN=*.foo.com
 *
 * <p>PEM-encoded wildcard certificate for hostname-matching tests.
 */
public static final byte[] X509_WILD_FOO = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIESDCCAzCgAwIBAgIJAIz+EYMBU6aUMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTU1NVoXDTI4MTEwNTE2MTU1NVowgaYx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczESMBAGA1UEAxQJKi5mb28uY29tMSUwIwYJKoZIhvcNAQkBFhZq\n"
+ "dWxpdXNkYXZpZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n"
+ "CgKCAQEAyGOvloI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjUg4pN\n"
+ "jYGViGjg7zhfbjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQcwHf0\n"
+ "ZHLN6sD9m2uVSp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t7iu1\n"
+ "JVjTuE0pcBvah2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAnAxK6\n"
+ "q/wGqcZ3zvFBTcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArDqUYx\n"
+ "qJUlPGlMqrKb3fCFiT3eXehwR7nlzQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCG\n"
+ "SAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4E\n"
+ "FgQUnxR3vz86tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuz\n"
+ "A1LKh6YNPg0wDQYJKoZIhvcNAQEFBQADggEBAH0ipG6J561UKUfgkeW7GvYwW98B\n"
+ "N1ZooWX+JEEZK7+Pf/96d3Ij0rw9ACfN4bpfnCq0VUNZVSYB+GthQ2zYuz7tf/UY\n"
+ "A6nxVgR/IjG69BmsBl92uFO7JTNtHztuiPqBn59pt+vNx4yPvno7zmxsfI7jv0ww\n"
+ "yfs+0FNm7FwdsC1k47GBSOaGw38kuIVWqXSAbL4EX9GkryGGOKGNh0qvAENCdRSB\n"
+ "G9Z6tyMbmfRY+dLSh3a9JwoEcBUso6EWYBakLbq4nG/nvYdYvG9ehrnLVwZFL82e\n"
+ "l3Q/RK95bnA6cuRClGusLad0e6bjkBzx/VQ3VarDEpAkTLUGVAa0CLXtnyc=\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
 * CN=*.co.jp
 *
 * <p>Wildcard spanning an entire registry-controlled suffix — presumably used for
 * negative hostname-verification tests; confirm against the tests that reference it.
 */
public static final byte[] X509_WILD_CO_JP = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIERjCCAy6gAwIBAgIJAIz+EYMBU6aVMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTYzMFoXDTI4MTEwNTE2MTYzMFowgaQx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAxQHKi5jby5qcDElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC\n"
+ "ggEBAMhjr5aCPoyp0R1iroWAfnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2B\n"
+ "lYho4O84X244QrZTRl8kQbYtxnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRy\n"
+ "zerA/ZtrlUqf+lKo0uWcocxeRc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY\n"
+ "07hNKXAb2odnVqgzcYiDkLV8ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8\n"
+ "BqnGd87xQU3FVZI4tbtkB+KzjD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiV\n"
+ "JTxpTKqym93whYk93l3ocEe55c0CAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgB\n"
+ "hvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYE\n"
+ "FJ8Ud78/OrbKOIJCSBYs2tDLXofYMB8GA1UdIwQYMBaAFHua2o+QmU5S0qzbswNS\n"
+ "yoemDT4NMA0GCSqGSIb3DQEBBQUAA4IBAQA0sWglVlMx2zNGvUqFC73XtREwii53\n"
+ "CfMM6mtf2+f3k/d8KXhLNySrg8RRlN11zgmpPaLtbdTLrmG4UdAHHYr8O4y2BBmE\n"
+ "1cxNfGxxechgF8HX10QV4dkyzp6Z1cfwvCeMrT5G/V1pejago0ayXx+GPLbWlNeZ\n"
+ "S+Kl0m3p+QplXujtwG5fYcIpaGpiYraBLx3Tadih39QN65CnAh/zRDhLCUzKyt9l\n"
+ "UGPLEUDzRHMPHLnSqT1n5UU5UDRytbjJPXzF+l/+WZIsanefWLsxnkgAuZe/oMMF\n"
+ "EJMryEzOjg4Tfuc5qM0EXoPcQ/JlheaxZ40p2IyHqbsWV4MRYuFH4bkM\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
 * CN=*.foo.com, subjectAlt=*.bar.com, subjectAlt=*.花子.co.jp
 * (*.hanako.co.jp in kanji)
 *
 * <p>PEM-encoded certificate combining wildcard CN and wildcard subjectAltNames.
 */
public static final byte[] X509_WILD_FOO_BAR_HANAKO = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIEcDCCA1igAwIBAgIJAIz+EYMBU6aWMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTczMVoXDTI4MTEwNTE2MTczMVowgaYx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxMLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoTDmh0dHBjb21wb25lbnRzMRowGAYDVQQLExF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczESMBAGA1UEAxQJKi5mb28uY29tMSUwIwYJKoZIhvcNAQkBFhZq\n"
+ "dWxpdXNkYXZpZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n"
+ "CgKCAQEAyGOvloI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjUg4pN\n"
+ "jYGViGjg7zhfbjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQcwHf0\n"
+ "ZHLN6sD9m2uVSp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t7iu1\n"
+ "JVjTuE0pcBvah2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAnAxK6\n"
+ "q/wGqcZ3zvFBTcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArDqUYx\n"
+ "qJUlPGlMqrKb3fCFiT3eXehwR7nlzQIDAQABo4GiMIGfMAkGA1UdEwQCMAAwLAYJ\n"
+ "YIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1Ud\n"
+ "DgQWBBSfFHe/Pzq2yjiCQkgWLNrQy16H2DAfBgNVHSMEGDAWgBR7mtqPkJlOUtKs\n"
+ "27MDUsqHpg0+DTAkBgNVHREEHTAbggkqLmJhci5jb22CDiou6Iqx5a2QLmNvLmpw\n"
+ "MA0GCSqGSIb3DQEBBQUAA4IBAQBobWC+D5/lx6YhX64CwZ26XLjxaE0S415ajbBq\n"
+ "DK7lz+Rg7zOE3GsTAMi+ldUYnhyz0wDiXB8UwKXl0SDToB2Z4GOgqQjAqoMmrP0u\n"
+ "WB6Y6dpkfd1qDRUzI120zPYgSdsXjHW9q2H77iV238hqIU7qCvEz+lfqqWEY504z\n"
+ "hYNlknbUnR525ItosEVwXFBJTkZ3Yw8gg02c19yi8TAh5Li3Ad8XQmmSJMWBV4XK\n"
+ "qFr0AIZKBlg6NZZFf/0dP9zcKhzSriW27bY0XfzA6GSiRDXrDjgXq6baRT6YwgIg\n"
+ "pgJsDbJtZfHnV1nd3M6zOtQPm1TIQpNmMMMd/DPrGcUQerD3\n" + "-----END CERTIFICATE-----\n")
.getBytes();
/**
 * CN=foo.com, CN=bar.com, CN=花子.co.jp
 *
 * <p>PEM-encoded certificate with three CN attributes in the subject (no subjectAltName).
 */
public static final byte[] X509_THREE_CNS_FOO_BAR_HANAKO = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIEbzCCA1egAwIBAgIJAIz+EYMBU6aXMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MTk0NVoXDTI4MTEwNTE2MTk0NVowgc0x\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIDAhNYXJ5bGFuZDEUMBIGA1UEBwwLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoMDmh0dHBjb21wb25lbnRzMRowGAYDVQQLDBF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczEQMA4GA1UEAwwHZm9vLmNvbTEQMA4GA1UEAwwHYmFyLmNvbTEV\n"
+ "MBMGA1UEAwwM6Iqx5a2QLmNvLmpwMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyGOv\n"
+ "loI+jKnRHWKuhYB+cTIEwZhagKJ0f3rIY8WNgujB7PlpgpjUg4pNjYGViGjg7zhf\n"
+ "bjhCtlNGXyRBti3GcaHiBIIP5nyCNn+Ay8tSWGo5v5Zc8BQcwHf0ZHLN6sD9m2uV\n"
+ "Sp/6UqjS5ZyhzF5FzvvUo3xw8fecdnStXQfHhkBnLpTjHE5t7iu1JVjTuE0pcBva\n"
+ "h2dWqDNxiIOQtXyKW8Sag1YxaunxQGqRNykSFiEJindxOSAnAxK6q/wGqcZ3zvFB\n"
+ "TcVVkji1u2QH4rOMP3PPxAIMkB8ONkdHTco1DmbE6BfDHArDqUYxqJUlPGlMqrKb\n"
+ "3fCFiT3eXehwR7nlzQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQf\n"
+ "Fh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUnxR3vz86\n"
+ "tso4gkJIFiza0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuzA1LKh6YNPg0w\n"
+ "DQYJKoZIhvcNAQEFBQADggEBAGuZb8ai1NO2j4v3y9TLZvd5s0vh5/TE7n7RX+8U\n"
+ "y37OL5k7x9nt0mM1TyAKxlCcY+9h6frue8MemZIILSIvMrtzccqNz0V1WKgA+Orf\n"
+ "uUrabmn+CxHF5gpy6g1Qs2IjVYWA5f7FROn/J+Ad8gJYc1azOWCLQqSyfpNRLSvY\n"
+ "EriQFEV63XvkJ8JrG62b+2OT2lqT4OO07gSPetppdlSa8NBSKP6Aro9RIX1ZjUZQ\n"
+ "SpQFCfo02NO0uNRDPUdJx2huycdNb+AXHaO7eXevDLJ+QnqImIzxWiY6zLOdzjjI\n"
+ "VBMkLHmnP7SjGSQ3XA4ByrQOxfOUTyLyE7NuemhHppuQPxE=\n" + "-----END CERTIFICATE-----\n")
.getBytes();
/**
 * subjectAlt=foo.com
 *
 * <p>PEM-encoded certificate whose subject has no CN at all; identity lives only in the SAN.
 */
public static final byte[] X509_NO_CNS_FOO = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIESjCCAzKgAwIBAgIJAIz+EYMBU6aYMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxEjAQBgNVBAcTCVZhbmNvdXZlcjEWMBQGA1UE\n"
+ "ChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9uc19zc2wxHTAbBgNVBAMU\n"
+ "FGRlbW9faW50ZXJtZWRpYXRlX2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZp\n"
+ "ZXNAZ21haWwuY29tMB4XDTA2MTIxMTE2MjYxMFoXDTI4MTEwNTE2MjYxMFowgZIx\n"
+ "CzAJBgNVBAYTAlVTMREwDwYDVQQIDAhNYXJ5bGFuZDEUMBIGA1UEBwwLRm9yZXN0\n"
+ "IEhpbGwxFzAVBgNVBAoMDmh0dHBjb21wb25lbnRzMRowGAYDVQQLDBF0ZXN0IGNl\n"
+ "cnRpZmljYXRlczElMCMGCSqGSIb3DQEJARYWanVsaXVzZGF2aWVzQGdtYWlsLmNv\n"
+ "bTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMhjr5aCPoyp0R1iroWA\n"
+ "fnEyBMGYWoCidH96yGPFjYLowez5aYKY1IOKTY2BlYho4O84X244QrZTRl8kQbYt\n"
+ "xnGh4gSCD+Z8gjZ/gMvLUlhqOb+WXPAUHMB39GRyzerA/ZtrlUqf+lKo0uWcocxe\n"
+ "Rc771KN8cPH3nHZ0rV0Hx4ZAZy6U4xxObe4rtSVY07hNKXAb2odnVqgzcYiDkLV8\n"
+ "ilvEmoNWMWrp8UBqkTcpEhYhCYp3cTkgJwMSuqv8BqnGd87xQU3FVZI4tbtkB+Kz\n"
+ "jD9zz8QCDJAfDjZHR03KNQ5mxOgXwxwKw6lGMaiVJTxpTKqym93whYk93l3ocEe5\n"
+ "5c0CAwEAAaOBkDCBjTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NM\n"
+ "IEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUnxR3vz86tso4gkJIFiza\n"
+ "0Mteh9gwHwYDVR0jBBgwFoAUe5raj5CZTlLSrNuzA1LKh6YNPg0wEgYDVR0RBAsw\n"
+ "CYIHZm9vLmNvbTANBgkqhkiG9w0BAQUFAAOCAQEAjl78oMjzFdsMy6F1sGg/IkO8\n"
+ "tF5yUgPgFYrs41yzAca7IQu6G9qtFDJz/7ehh/9HoG+oqCCIHPuIOmS7Sd0wnkyJ\n"
+ "Y7Y04jVXIb3a6f6AgBkEFP1nOT0z6kjT7vkA5LJ2y3MiDcXuRNMSta5PYVnrX8aZ\n"
+ "yiqVUNi40peuZ2R8mAUSBvWgD7z2qWhF8YgDb7wWaFjg53I36vWKn90ZEti3wNCw\n"
+ "qAVqixM+J0qJmQStgAc53i2aTMvAQu3A3snvH/PHTBo+5UL72n9S1kZyNCsVf1Qo\n"
+ "n8jKTiRriEM+fMFlcgQP284EBFzYHyCXFb9O/hMjK2+6mY9euMB1U1aFFzM/Bg==\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
 * Intermediate CA for all of these.
 *
 * <p>PEM-encoded issuing CA certificate for the end-entity certificates above.
 */
public static final byte[] X509_INTERMEDIATE_CA = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIEnDCCA4SgAwIBAgIJAJTNwZ6yNa5cMA0GCSqGSIb3DQEBBQUAMIGGMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxFjAUBgNVBAoTDXd3dy5jdWNiYy5jb20xFDAS\n"
+ "BgNVBAsUC2NvbW1vbnNfc3NsMRUwEwYDVQQDFAxkZW1vX3Jvb3RfY2ExJTAjBgkq\n"
+ "hkiG9w0BCQEWFmp1bGl1c2Rhdmllc0BnbWFpbC5jb20wHhcNMDYxMTA1MjE0OTMx\n"
+ "WhcNMDcxMTA1MjE0OTMxWjCBojELMAkGA1UEBhMCQ0ExCzAJBgNVBAgTAkJDMRIw\n"
+ "EAYDVQQHEwlWYW5jb3V2ZXIxFjAUBgNVBAoTDXd3dy5jdWNiYy5jb20xFDASBgNV\n"
+ "BAsUC2NvbW1vbnNfc3NsMR0wGwYDVQQDFBRkZW1vX2ludGVybWVkaWF0ZV9jYTEl\n"
+ "MCMGCSqGSIb3DQEJARYWanVsaXVzZGF2aWVzQGdtYWlsLmNvbTCCASIwDQYJKoZI\n"
+ "hvcNAQEBBQADggEPADCCAQoCggEBAL0S4y3vUO0EM6lwqOEfK8fvrUprIbsikXaG\n"
+ "XzejcZ+T3l2Dc7t8WtBfRf78i4JypMqJQSijrUicj3H6mOMIReKaXm6ls4hA5d8w\n"
+ "Lhmgiqsz/kW+gA8SeWGWRN683BD/RbQmzOls6ynBvap9jZlthXWBrSIlPCQoBLXY\n"
+ "KVaxGzbL4ezaq+XFMKMQSm2uKwVmHHQNbfmZlPsuendBVomb/ked53Ab9IH6dwwN\n"
+ "qJH9WIrvIzIVEXWlpvQ5MCqozM7u1akU+G8cazr8theGPCaYkzoXnigWua4OjdpV\n"
+ "9z5ZDknhfBzG1AjapdG07FIirwWWgIyZXqZSD96ikmLtwT29qnsCAwEAAaOB7jCB\n"
+ "6zAdBgNVHQ4EFgQUe5raj5CZTlLSrNuzA1LKh6YNPg0wgbsGA1UdIwSBszCBsIAU\n"
+ "rN8eFIvMiRFXXgDqKumS0/W2AhOhgYykgYkwgYYxCzAJBgNVBAYTAkNBMQswCQYD\n"
+ "VQQIEwJCQzEWMBQGA1UEChMNd3d3LmN1Y2JjLmNvbTEUMBIGA1UECxQLY29tbW9u\n"
+ "c19zc2wxFTATBgNVBAMUDGRlbW9fcm9vdF9jYTElMCMGCSqGSIb3DQEJARYWanVs\n"
+ "aXVzZGF2aWVzQGdtYWlsLmNvbYIJAJTNwZ6yNa5bMAwGA1UdEwQFMAMBAf8wDQYJ\n"
+ "KoZIhvcNAQEFBQADggEBAIB4KMZvHD20pdKajFtMBpL7X4W4soq6EeTtjml3NYa9\n"
+ "Qc52bsQEGNccKY9afYSBIndaQvFdtmz6HdoN+B8TjYShw2KhyjtKimGLpWYoi1YF\n"
+ "e4aHdmA/Gp5xk8pZzR18FmooxC9RqBux+NAM2iTFSLgDtGIIj4sg2rbn6Bb6ZlQT\n"
+ "1rg6VucXCA1629lNfMeNcu7CBNmUKIdaxHR/YJQallE0KfGRiOIWPrPj/VNk0YA6\n"
+ "XFg0ocjqXJ2/N0N9rWVshMUaXgOh7m4D/5zga5/nuxDU+PoToA6mQ4bV6eCYqZbh\n"
+ "aa1kQYtR9B4ZiG6pB82qVc2dCqStOH2FAEWos2gAVkQ=\n" + "-----END CERTIFICATE-----\n")
.getBytes();
/**
 * Root CA for all of these.
 *
 * <p>PEM-encoded self-issued root of the demo chain (root → intermediate → end-entity).
 */
public static final byte[] X509_ROOT_CA = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIEgDCCA2igAwIBAgIJAJTNwZ6yNa5bMA0GCSqGSIb3DQEBBQUAMIGGMQswCQYD\n"
+ "VQQGEwJDQTELMAkGA1UECBMCQkMxFjAUBgNVBAoTDXd3dy5jdWNiYy5jb20xFDAS\n"
+ "BgNVBAsUC2NvbW1vbnNfc3NsMRUwEwYDVQQDFAxkZW1vX3Jvb3RfY2ExJTAjBgkq\n"
+ "hkiG9w0BCQEWFmp1bGl1c2Rhdmllc0BnbWFpbC5jb20wHhcNMDYxMTA1MjEzNjQz\n"
+ "WhcNMjYxMTA1MjEzNjQzWjCBhjELMAkGA1UEBhMCQ0ExCzAJBgNVBAgTAkJDMRYw\n"
+ "FAYDVQQKEw13d3cuY3VjYmMuY29tMRQwEgYDVQQLFAtjb21tb25zX3NzbDEVMBMG\n"
+ "A1UEAxQMZGVtb19yb290X2NhMSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZpZXNA\n"
+ "Z21haWwuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv+OnocmJ\n"
+ "79UeO2hlCwK+Cle5uZWnU6uwJl+08z5cvebb5tT64WL9+psDbfgUH/Gm9JsuxKTg\n"
+ "w1tZO/4duIgnaLNSx4HoqaTjwigd/hR3TsoGEPXTCkz1ikgTCOEDvl+iMid6aOrd\n"
+ "mViE8HhscxKZ+h5FE7oHZyuT6gFoiaIXhFq+xK2w4ZwDz9L+paiwqywyUJJMnh9U\n"
+ "jKorY+nua81N0oxpIhHPspCanDU4neMzCzYOZyLR/LqV5xORvHcFY84GWMz5hI25\n"
+ "JbgaWJsYKuCAvNsnQwVoqKPGa7x1fn7x6oGsXJaCVt8weUwIj2xwg1lxMhrNaisH\n"
+ "EvKpEAEnGGwWKQIDAQABo4HuMIHrMB0GA1UdDgQWBBSs3x4Ui8yJEVdeAOoq6ZLT\n"
+ "9bYCEzCBuwYDVR0jBIGzMIGwgBSs3x4Ui8yJEVdeAOoq6ZLT9bYCE6GBjKSBiTCB\n"
+ "hjELMAkGA1UEBhMCQ0ExCzAJBgNVBAgTAkJDMRYwFAYDVQQKEw13d3cuY3VjYmMu\n"
+ "Y29tMRQwEgYDVQQLFAtjb21tb25zX3NzbDEVMBMGA1UEAxQMZGVtb19yb290X2Nh\n"
+ "MSUwIwYJKoZIhvcNAQkBFhZqdWxpdXNkYXZpZXNAZ21haWwuY29tggkAlM3BnrI1\n"
+ "rlswDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAlPl3/8h1LttR1svC\n"
+ "S8RXbHpAWIT2BEDhGHUNjSmgDQNkE/itf/FCEXh0tlU4bYdtBSOHzflbnzOyIPId\n"
+ "VZeSWs33V38xDFy6KoVg1gT8JxkLmE5S1vWkpsHIlpw/U6r7KD0Kx9FYx5AiXjw0\n"
+ "lzz/zlVNuO2U09KIDwDPVG1mBzQiMiSWj1U1pM4KxINkWQwDy/fvu/I983s8lW5z\n"
+ "hf2WuFNzQN3fcMK5dpBE9NVIu27oYuGYh2sak34v+7T700W2ooBB71qFXtm9P5rl\n"
+ "Yp9RCEsg3KEEPNTtCBs8fROeXvLDrP0cmBIqwGYDuRNCxFDTOdjv6YGdA8nLOjaH\n" + "2dDk0g==\n"
+ "-----END CERTIFICATE-----\n").getBytes();
/**
 * Below is the private key for all the server certificates above (but
 * not the intermediate CA or the root CA). All of those server certs
 * came from the same private key.
 *
 * <p>The component values appear to be lowercase-hex encodings of the CRT
 * parameters, except {@link #RSA_PUBLIC_EXPONENT} which is decimal —
 * TODO(review) confirm against the code that parses them.
 */
public static final String RSA_PUBLIC_MODULUS =
"00c863af96823e8ca9d11d62ae85807e713204c1985a80a2747f7ac863c5"
+ "8d82e8c1ecf9698298d4838a4d8d81958868e0ef385f6e3842b653465f24"
+ "41b62dc671a1e204820fe67c82367f80cbcb52586a39bf965cf0141cc077"
+ "f46472cdeac0fd9b6b954a9ffa52a8d2e59ca1cc5e45cefbd4a37c70f1f7"
+ "9c7674ad5d07c78640672e94e31c4e6dee2bb52558d3b84d29701bda8767"
+ "56a83371888390b57c8a5bc49a8356316ae9f1406a913729121621098a77"
+ "713920270312baabfc06a9c677cef1414dc5559238b5bb6407e2b38c3f73"
+ "cfc4020c901f0e3647474dca350e66c4e817c31c0ac3a94631a895253c69"
+ "4caab29bddf085893dde5de87047b9e5cd";
public static final String RSA_PUBLIC_EXPONENT = "65537";
public static final String RSA_PRIVATE_EXPONENT =
"577abd3295553d0efd4d38c13b62a6d03fa7b7e40cce4f1d5071877d96c6"
+ "7a39a63f0f7ab21a89db8acae45587b3ef251309a70f74dc1ac02bde68f3"
+ "8ed658e54e685ed370a18c054449512ea66a2252ed36e82b565b5159ec83"
+ "f23df40ae189550a183865b25fd77789e960f0d8cedcd72f32d7a66edb4b"
+ "a0a2baf3fbeb6c7d75f56ef0af9a7cff1c8c7f297d72eae7982164e50a89"
+ "d450698cf598d39343201094241d2d180a95882a7111e58f4a5bdbc5c125"
+ "a967dd6ed9ec614c5853e88e4c71e8b682a7cf89cb1d82b6fe78cc865084"
+ "c8c5dfbb50c939df2b839c977b0245bfa3615e0592b527b1013d5b675ecb"
+ "44e6b355c1df581f50997175166eef39";
public static final String RSA_PRIME1 =
"00fe759c4f0ce8b763880215e82767e7a937297668f4e4b1e119c6b22a3c"
+ "a2c7b06c547d88d0aa45f645d7d3aeadaf7f8bc594deae0978529592977c"
+ "b1ff890f05033a9e9e15551cad9fbf9c41d12139ccd99c1c3ac7b2197eff"
+ "350d236bb900c1440953b64956e0a058ef824a2e16894af175177c77dbe1" + "fef7d8b532608d2513";
public static final String RSA_PRIME2 =
"00c99a45878737a4cf73f9896680b75487f1b669b7686a6ba07103856f31"
+ "db668c2c440c44cdd116f708f631c37a9adf119f5b5cb58ffe3dc62e20af"
+ "af72693d936dc6bb3c5194996468389c1f094079b81522e94572b4ad7d39"
+ "529178e9b8ebaeb1f0fdd83b8731c5223f1dea125341d1d64917f6b1a6ae" + "c18d320510d79f859f";
public static final String RSA_EXPONENT1 =
"029febf0d4cd41b7011c2465b4a259bd6118486464c247236f44a169d61e"
+ "47b9062508f674508d5031003ceabc57e714e600d71b2c75d5443db2da52"
+ "6bb45a374f0537c5a1aab3150764ce93cf386c84346a6bd01f6732e42075"
+ "c7a0e9e78a9e73b934e7d871d0f75673820089e129a1604438edcbbeb4e2" + "106467da112ce389";
public static final String RSA_EXPONENT2 =
"00827e76650c946afcd170038d32e1f8386ab00d6be78d830efe382e45d4"
+ "7ad4bd04e6231ee22e66740efbf52838134932c9f8c460cdccdec58a1424"
+ "4427859192fd6ab6c58b74e97941b0eaf577f2a11713af5e5952af3ae124"
+ "9a9a892e98410dfa2628d9af668a43b5302fb7d496c9b2fec69f595292b6" + "e997f079b0f6314eb7";
public static final String RSA_COEFFICIENT =
"00e6b62add350f1a2a8968903ff76c31cf703b0d7326c4a620aef01225b7"
+ "1640b3f2ec375208c5f7299863f6005b7799b6e529bb1133c8435bf5fdb5"
+ "a786f6cd8a19ee7094a384e6557c600a38845a0960ddbfd1df18d0af5740"
+ "001853788f1b5ccbf9affb4c52c9d2efdb8aab0183d86735b32737fb4e79" + "2b8a9c7d91c7d175ae";
/**
 * subjectAlt=IP Address:127.0.0.1, email:oleg@ural.ru, DNS:localhost.localdomain
 *
 * <p>Exercises three different subjectAltName types (IP, email, DNS) in one certificate.
 */
public static final byte[] X509_MULTIPLE_SUBJECT_ALT = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIDcTCCAtqgAwIBAgIBATANBgkqhkiG9w0BAQUFADBAMQswCQYDVQQGEwJDSDEL\n"
+ "MAkGA1UECBMCWkgxDzANBgNVBAcTBlp1cmljaDETMBEGA1UEAxMKTXkgVGVzdCBD\n"
+ "QTAeFw0wODEwMzExMTU3NDVaFw0wOTEwMzExMTU3NDVaMGkxCzAJBgNVBAYTAkNI\n"
+ "MRAwDgYDVQQIEwdVbmtub3duMRAwDgYDVQQHEwdVbmtub3duMRAwDgYDVQQKEwdV\n"
+ "bmtub3duMRAwDgYDVQQLEwdVbmtub3duMRIwEAYDVQQDEwlsb2NhbGhvc3QwggG4\n"
+ "MIIBLAYHKoZIzjgEATCCAR8CgYEA/X9TgR11EilS30qcLuzk5/YRt1I870QAwx4/\n"
+ "gLZRJmlFXUAiUftZPY1Y+r/F9bow9subVWzXgTuAHTRv8mZgt2uZUKWkn5/oBHsQ\n"
+ "IsJPu6nX/rfGG/g7V+fGqKYVDwT7g/bTxR7DAjVUE1oWkTL2dfOuK2HXKu/yIgMZ\n"
+ "ndFIAccCFQCXYFCPFSMLzLKSuYKi64QL8Fgc9QKBgQD34aCF1ps93su8q1w2uFe5\n"
+ "eZSvu/o66oL5V0wLPQeCZ1FZV4661FlP5nEHEIGAtEkWcSPoTCgWE7fPCTKMyKbh\n"
+ "PBZ6i1R8jSjgo64eK7OmdZFuo38L+iE1YvH7YnoBJDvMpPG+qFGQiaiD3+Fa5Z8G\n"
+ "kotmXoB7VSVkAUw7/s9JKgOBhQACgYEA6ogAb/YLM1Rz9AoXKW4LA70VtFf7Mqqp\n"
+ "divdu9f72WQc1vMKo1YMf3dQadkMfBYRvAAa1IXDnoiFCHhXnVRkWkoUBJyNebLB\n"
+ "N92CZc0RVFZiMFgQMEh8UldnvAIi4cBk0/YuN3BGl4MzmquVIGrFovdWGqeaveOu\n"
+ "Xcu4lKGJNiqjODA2MDQGA1UdEQQtMCuHBH8AAAGBDG9sZWdAdXJhbC5ydYIVbG9j\n"
+ "YWxob3N0LmxvY2FsZG9tYWluMA0GCSqGSIb3DQEBBQUAA4GBAIgEwIoCSRkU3O7K\n"
+ "USYaOYyfJB9hsvs6YpClvYXiQ/5kPGARP60pM62v4wC7wI9shEizokIAxY2+O3cC\n"
+ "vwuJhNYaa2FJMELIwRN3XES8X8R6JHWbPaRjaAAPhczuEd8SZYy8yiVLmJTgw0gH\n"
+ "BSW775NHlkjsscFVgXkNf0PobqJ9\n" + "-----END CERTIFICATE-----").getBytes();
/**
 * subject CN=repository.infonotary.com (Multiple AVA in RDN).
 *
 * <p>PEM-encoded certificate whose subject contains multi-valued RDNs, for DN-parsing tests.
 */
public static final byte[] X509_MULTIPLE_VALUE_AVA = ("-----BEGIN CERTIFICATE-----\n"
+ "MIIFxzCCBK+gAwIBAgIIRO/2+/XA7z4wDQYJKoZIhvcNAQEFBQAwgZwxgZkwCQYD\n"
+ "VQQGDAJCRzAVBgNVBAoMDkluZm9Ob3RhcnkgUExDMBcGCgmSJomT8ixkARkWCWRv\n"
+ "bWFpbi1jYTAtBgNVBAMMJmktTm90YXJ5IFRydXN0UGF0aCBWYWxpZGF0ZWQgRG9t\n"
+ "YWluIENBMC0GA1UECwwmaS1Ob3RhcnkgVHJ1c3RQYXRoIFZhbGlkYXRlZCBEb21h\n"
+ "aW4gQ0EwHhcNMTIwNjE4MDg1MzIyWhcNMTMwNjE4MDg1MzIyWjCBxjGBwzAJBgNV\n"
+ "BAYTAkJHMBUGA1UEChMOSW5mb05vdGFyeSBQTEMwFwYDVQQLExBGaWxlcyBSZXBv\n"
+ "c2l0b3J5MBcGCgmSJomT8ixkARkWCWRvbWFpbi1jYTAgBgNVBAMTGXJlcG9zaXRv\n"
+ "cnkuaW5mb25vdGFyeS5jb20wIwYJKoZIhvcNAQkBFhZzdXBwb3J0QGluZm9ub3Rh\n"
+ "cnkuY29tMCYGCSqGSIb3DQEJAhMZcmVwb3NpdG9yeS5pbmZvbm90YXJ5LmNvbTCC\n"
+ "ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALKWjGpgsuz103xVEW/GSg5I\n"
+ "tBoLbXPxockabOTHnOh0VO2sImycyhBH78nMj+VMexn4y+kdCOuJqAA5LApxyhTA\n"
+ "KgKlRN7TfoC90IYHjB1dqLMIseg4YM7Oe0e4Z2nL50bHoqXg7OUHaILUQn7ufpYp\n"
+ "+VCWxyI43KvaR4+HnST3x47wqeArg/rULGV1a16X+46cxq2eoMAcDfostXHaemvz\n"
+ "vg/Wd5xcWfPbF/oY1/sBXH+AK+peVBMen82+3GtAWtNWbyPE3bT4RG+WgKUyfLZ1\n"
+ "7A67rX9DkUEVMPQpa50MpLnrRveiM9w6R3mrMHMHbNnwID0Tqfds5zzOi/7cLD0C\n"
+ "AwEAAaOCAd8wggHbMA4GA1UdDwEB/wQEAwIDuDATBgNVHSUEDDAKBggrBgEFBQcD\n"
+ "ATBEBggrBgEFBQcBAQQ4MDYwNAYIKwYBBQUHMAGGKGh0dHA6Ly9vY3NwLmluZm9u\n"
+ "b3RhcnkuY29tL3Jlc3BvbmRlci5jZ2kwgZAGA1UdIASBiDCBhTCBggYMKwYBBAGB\n"
+ "rQABAgMBMHIwOAYIKwYBBQUHAgEWLGh0dHA6Ly9yZXBvc2l0b3J5LmluZm9ub3Rh\n"
+ "cnkuY29tL2RvbWFpbi5odG1sMDYGCCsGAQUFBwICMCoaKGktTm90YXJ5IFZhbGlk\n"
+ "YXRlZCBEb21haW4gQ2VydGlmaWNhdGUgQ1AwgYkGA1UdHwSBgTB/MDWgL6Athito\n"
+ "dHRwOi8vY3JsLmluZm9ub3RhcnkuY29tL2NybC9kb21haW4tY2EuY3JsgQIBVjBG\n"
+ "oECgPoY8bGRhcDovL2xkYXAuaW5mb25vdGFyeS5jb20vZGM9ZG9tYWluLWNhLGRj\n"
+ "PWluZm9ub3RhcnksZGM9Y29tgQIBVjAPBgNVHRMBAf8EBTADAQEAMB0GA1UdDgQW\n"
+ "BBTImKJZrgV/8n7mHrA0U5EeGsBvbzAfBgNVHSMEGDAWgBTbkorEK+bPdVPpvyVI\n"
+ "PTxGFnuOoDANBgkqhkiG9w0BAQUFAAOCAQEAhsMbqsqvkbfVaKZ+wDY9rX3EtuDS\n"
+ "isdAo4AjmWgTtj/aBGiEiXcIGP312x+0JF+mEEQ75ZOKN+WsM8eLB0F4aqylklk7\n"
+ "6yRYauRXp8dfbXrT3ozxekt0cpSMqbzze456krI12nL+C00V2Iwq96k5J/yZboNW\n"
+ "Q+ibCaEAHNiL4tGVHSHm6znkWvIuUTbDgDEsm5RdafO27suz5H6zMnV+VE6onN1J\n"
+ "I1mQmUs44cg2HZAqnFBpDyJQhNYy8M7yGVaRkbfuVaMqiPa+xDPR5v7NFB3kxRq2\n"
+ "Za2Snopi52eUxDEhJ0MNqFi3Jfj/ZSmJ+XHra5lU4R8lijCAq8SVLZCmIQ==\n"
+ "-----END CERTIFICATE-----").getBytes();
/**
 * Test certificate for the {@code *.google.com} wildcard name (per the constant name —
 * TODO(review) confirm subject by decoding; not covered by the shared-key note above).
 */
public static final byte[] S_GOOGLE_COM = ("-----BEGIN CERTIFICATE-----\n"
+ "MIICpzCCAY+gAwIBAgIBATANBgkqhkiG9w0BAQUFADAXMRUwEwYDVQQDDAwqLmdv\n"
+ "b2dsZS5jb20wHhcNMTcwMTEzMjI0OTAzWhcNMTgwMTEzMjI0OTAzWjAXMRUwEwYD\n"
+ "VQQDDAwqLmdvb2dsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB\n"
+ "AQDHuzznuHdJ5PH344xCyGYnUnIRhyLGBKN3WDLLrXWtr/5Sf3Q1qkiMiJ4BINsh\n"
+ "3Xy0z7VvHmMFlntgHXtkofBUPvTihxsVIypRkCZb5hpsWLotR10AW2JpVl/oxLP2\n"
+ "227/36X1zKh33fjImLJl9KzGWHLsbCBleQQJOn7YRsNR/QBZO0XGGkN/R2rRfLF3\n"
+ "rseRfI5gJjZkO0WDxocnf/iieOe0XNR0NAZaY1aozzPmZ/pRrOKYB8OFH7F73WOC\n"
+ "lPIUGai/byJ9SpbXdLUcMlGhml/4XzcnV/WVRD2P/mlY+xEFG3UEy3ufhNnKFJul\n"
+ "yjZrOaKbagamqtOyktzkjnerAgMBAAEwDQYJKoZIhvcNAQEFBQADggEBADaMcwVs\n"
+ "w5kbnoDJzMBJ01H16T4u8k78i/ybwz7u7krgkU0tABXCRj7S/4Dt3jqQ/rV6evj4\n"
+ "gIJ/2kZUp/PHKkV7CxWI48XBTAQUu9LEpxj0Hut3AtNMD9y/J6cFn2978tWsHFHI\n"
+ "mYgvclKUDE4WFMvuxfQVuX3RcGQ5i8khEMczY/KVhZYDcLU1PU0GTTJqqrQm59Z4\n"
+ "T4UyI3OPBR7Nb/kaU1fcgQ083uxRXcNYRMMZnU6c2oFnR+c6pO6aGoXo0C6rgC4R\n"
+ "pOj4hPvHCfZO2xg6HAdQ7UPALLX8pu5KGot7GRc8yiJ/Q1nBEuiPKKu0MIwQoFgP\n"
+ "WUux/APTsgLR7Vc=\n" + "-----END CERTIFICATE-----").getBytes();
  // PEM-encoded self-signed test certificate for CN "dummy-value.com"; the
  // constant name suggests it carries an IP-address subjectAltName (1.1.1.1)
  // via the X.509 v3 extension block (the cert has an extensions section:
  // note the "a3EzAR..." / subjectAltName-looking DER near the end) — decode
  // to confirm the exact SAN value.
  public static final byte[] IP_1_1_1_1 = ("-----BEGIN CERTIFICATE-----\n"
      + "MIICwjCCAaqgAwIBAgIBATANBgkqhkiG9w0BAQUFADAaMRgwFgYDVQQDEw9kdW1t\n"
      + "eS12YWx1ZS5jb20wHhcNMTcwMTEzMjI1MTQ2WhcNMTgwMTEzMjI1MTQ2WjAaMRgw\n"
      + "FgYDVQQDEw9kdW1teS12YWx1ZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw\n"
      + "ggEKAoIBAQDfrapp3jHLp1RlElzpR/4sF9AcTYwMF1N+adkHRoVtmTlJV2lTIAjn\n"
      + "QLauy0Kkzv8uxmbID3uROgrFNDQ5RxTTCe+kW/vE6Pyzr5Z5ayjSTKeycTE7mAC4\n"
      + "6ntoCeEWiD593zlfqVo5PuRSp9Kusd+kexNVjC/BETDPa3yXctcH1ouW9GyGItgQ\n"
      + "u4GhCE8cipKMuTltgfK+Gh/5e9lFG9/F2fD+wHUVBULLR3JOQoqwgk2zAwKDwLuS\n"
      + "sEd1CBi35+W3apCKN0SEdTKIAxc/R+O/1j2hpOl9yXCCYyveGwJdFXVZtDcx+9/H\n"
      + "7NXhOdmw/mTXC5fOQGKciEo2SXt8Wp89AgMBAAGjEzARMA8GA1UdEQQIMAaHBAEB\n"
      + "AQEwDQYJKoZIhvcNAQEFBQADggEBAEAO6CE8twpcfdjk9oMjI5nX9GdC5Wt6+ujd\n"
      + "tLj0SbXvMKzCLLkveT0xTEzXfyEo8KW2qYYvPP1h83BIxsbR/J3Swt35UQVofv+4\n"
      + "JgO0FIdgB+iLEcjUh5+60xslylqWE+9bSWm4f06OXuv78tq5NYPZKku/3i4tqLRp\n"
      + "gH2rTtjX7Q4olSS7GdAgfiA2AnDZAbMtxtsnTt/QFpYQqhlkqHVDwgkGP7C8aMBD\n"
      + "RH0UIQCPxUkhwhtNmVyHO42r6oHXselZoVU6XRHuhogrGxPf/pzDUvrKBiJhsZQQ\n"
      + "oEu+pZCwkFLiNwUoq1G2oDpkkdBWB0JcBXB2Txa536ezFFWZYc0=\n" + "-----END CERTIFICATE-----")
      .getBytes();
  // PEM-encoded self-signed test certificate (CN "www.company.com") whose
  // subjectAltName extension carries an rfc822Name; the base64 payload embeds
  // "email@example.com" ("ZW1haWxAZXhhbXBsZS5jb20" fragment is visible near the
  // SAN extension) — used to exercise e-mail alt-subject-name handling.
  public static final byte[] EMAIL_ALT_SUBJECT_NAME = ("-----BEGIN CERTIFICATE-----\n"
      + "MIIDpTCCAo2gAwIBAgIJANqkMEtlkelbMA0GCSqGSIb3DQEBCwUAMHAxCzAJBgNV\n"
      + "BAYTAlVTMQswCQYDVQQIDAJWQTERMA8GA1UEBwwIU29tZUNpdHkxEjAQBgNVBAoM\n"
      + "CU15Q29tcGFueTETMBEGA1UECwwKTXlEaXZpc2lvbjEYMBYGA1UEAwwPd3d3LmNv\n"
      + "bXBhbnkuY29tMB4XDTE4MDIxNTA3MjkzMFoXDTIwMDIxNTA3MjkzMFowcDELMAkG\n"
      + "A1UEBhMCVVMxCzAJBgNVBAgMAlZBMREwDwYDVQQHDAhTb21lQ2l0eTESMBAGA1UE\n"
      + "CgwJTXlDb21wYW55MRMwEQYDVQQLDApNeURpdmlzaW9uMRgwFgYDVQQDDA93d3cu\n"
      + "Y29tcGFueS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC4v6Oq\n"
      + "Ua0goRVn1cmT7MOpJhXFm3A70bTpvJIRpEjtGIz99hb34/9r5AYyf1VhKyWmBq24\n"
      + "XNcOJ59XOlyjjbm2Tl811ufTOdcNbPadoVBmMt4039OSUFpVb4wAw2XPWLTCG2h1\n"
      + "HNj9GuFHmwcDsg5EiIRrhDGQm2LLLAGoe5PdReoMZCeeWzNWvKTCV14pyRzwQhJL\n"
      + "F1OmzLYzovbPfB8LZVhQgDbLsh034FScivf2oKDB+NEzAEagNpnrFR0MFLWGYsu1\n"
      + "nWD5RiZi78HFGiibmhH7QrEPfGlo2eofuUga6naoBUROqkmMCIL8n1HZ/Ur0oGny\n"
      + "vQCj1AyrfOhuVC53AgMBAAGjQjBAMAsGA1UdDwQEAwIEMDATBgNVHSUEDDAKBggr\n"
      + "BgEFBQcDATAcBgNVHREEFTATgRFlbWFpbEBleGFtcGxlLmNvbTANBgkqhkiG9w0B\n"
      + "AQsFAAOCAQEAZ0IsqRrsEmJ6Fa9Yo6PQtrKJrejN2TTDddVgyLQdokzWh/25JFad\n"
      + "NCMYPH5KjTUyKf96hJDlDayjbKk1PMMhSZMU5OG9NOuGMH/dQttruG1ojse7KIKg\n"
      + "yHDQrfq5Exxgfa7CMHRKAoTCY7JZhSLyVbTMVhmGfuUDad/RA86ZisXycp0ZmS97\n"
      + "qDkAmzFL0sL0ZUWNNUh4ZUWvCUZwiuN08z70NjGqXMTDCf68p3SYxbII0xTfScgf\n"
      + "aQ/A/hD7IbGGTexeoTwpEj01DNvefbQV6//neo32/R5XD0D5jn3TCgZcMThA6H3a\n"
      + "VkEghVg+s7uMfL/UEebOBQWXQJ/uVoknMA==\n" + "-----END CERTIFICATE-----").getBytes();
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto
// Protobuf Java Version: 3.25.8
package com.google.recaptchaenterprise.v1;
/**
*
*
* <pre>
* Information about SMS toll fraud.
* </pre>
*
* Protobuf type {@code google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict}
*/
public final class SmsTollFraudVerdict extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict)
SmsTollFraudVerdictOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use SmsTollFraudVerdict.newBuilder() to construct.
  private SmsTollFraudVerdict(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the shared default instance; the repeated
  // `reasons` field starts as an immutable empty list.
  private SmsTollFraudVerdict() {
    reasons_ = java.util.Collections.emptyList();
  }
  // Invoked reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SmsTollFraudVerdict();
  }
  // Message descriptor, looked up from the generated file-level proto class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_SmsTollFraudVerdict_descriptor;
  }
  // Wires reflective field access to this class and its Builder.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_SmsTollFraudVerdict_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.class,
            com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Reasons contributing to the SMS toll fraud verdict.
   * </pre>
   *
   * Protobuf enum {@code
   * google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason}
   */
  public enum SmsTollFraudReason implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Default unspecified reason
     * </pre>
     *
     * <code>SMS_TOLL_FRAUD_REASON_UNSPECIFIED = 0;</code>
     */
    SMS_TOLL_FRAUD_REASON_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * The provided phone number was invalid
     * </pre>
     *
     * <code>INVALID_PHONE_NUMBER = 1;</code>
     */
    INVALID_PHONE_NUMBER(1),
    // Sentinel for wire values unknown to this generated code version;
    // carries -1 and has no descriptor/number.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Default unspecified reason
     * </pre>
     *
     * <code>SMS_TOLL_FRAUD_REASON_UNSPECIFIED = 0;</code>
     */
    public static final int SMS_TOLL_FRAUD_REASON_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * The provided phone number was invalid
     * </pre>
     *
     * <code>INVALID_PHONE_NUMBER = 1;</code>
     */
    public static final int INVALID_PHONE_NUMBER_VALUE = 1;

    // Wire number of this constant; UNRECOGNIZED has none by contract.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static SmsTollFraudReason valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or null if unknown.
     */
    public static SmsTollFraudReason forNumber(int value) {
      switch (value) {
        case 0:
          return SMS_TOLL_FRAUD_REASON_UNSPECIFIED;
        case 1:
          return INVALID_PHONE_NUMBER;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<SmsTollFraudReason>
        internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<SmsTollFraudReason>
        internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<SmsTollFraudReason>() {
              public SmsTollFraudReason findValueByNumber(int number) {
                return SmsTollFraudReason.forNumber(number);
              }
            };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      // ordinal() is safe here: generated constants are declared in descriptor order.
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    // First (and only) nested enum type of the enclosing message descriptor.
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    private static final SmsTollFraudReason[] VALUES = values();

    public static SmsTollFraudReason valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // Numeric wire value backing this constant (-1 for UNRECOGNIZED).
    private final int value;

    private SmsTollFraudReason(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason)
  }
  public static final int RISK_FIELD_NUMBER = 1;
  // Backing store for the `risk` field; 0F is the proto3 default.
  private float risk_ = 0F;
  /**
   *
   *
   * <pre>
   * Output only. Probability of an SMS event being fraudulent.
   * Values are from 0.0 (lowest) to 1.0 (highest).
   * </pre>
   *
   * <code>float risk = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   *
   * @return The risk.
   */
  @java.lang.Override
  public float getRisk() {
    return risk_;
  }
  public static final int REASONS_FIELD_NUMBER = 2;

  // Repeated enum stored internally as raw wire integers so that values unknown
  // to this code version are preserved round-trip.
  @SuppressWarnings("serial")
  private java.util.List<java.lang.Integer> reasons_;

  // Lazily adapts the raw int list to typed enum values; unknown numbers map to
  // UNRECOGNIZED instead of null.
  private static final com.google.protobuf.Internal.ListAdapter.Converter<
          java.lang.Integer,
          com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason>
      reasons_converter_ =
          new com.google.protobuf.Internal.ListAdapter.Converter<
              java.lang.Integer,
              com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason>() {
            public com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason convert(
                java.lang.Integer from) {
              com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason result =
                  com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason
                      .forNumber(from);
              return result == null
                  ? com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason
                      .UNRECOGNIZED
                  : result;
            }
          };

  /**
   *
   *
   * <pre>
   * Output only. Reasons contributing to the SMS toll fraud verdict.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return A list containing the reasons.
   */
  @java.lang.Override
  public java.util.List<com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason>
      getReasonsList() {
    return new com.google.protobuf.Internal.ListAdapter<
        java.lang.Integer,
        com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason>(
        reasons_, reasons_converter_);
  }

  /**
   *
   *
   * <pre>
   * Output only. Reasons contributing to the SMS toll fraud verdict.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return The count of reasons.
   */
  @java.lang.Override
  public int getReasonsCount() {
    return reasons_.size();
  }

  /**
   *
   *
   * <pre>
   * Output only. Reasons contributing to the SMS toll fraud verdict.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @param index The index of the element to return.
   * @return The reasons at the given index.
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason getReasons(
      int index) {
    return reasons_converter_.convert(reasons_.get(index));
  }

  /**
   *
   *
   * <pre>
   * Output only. Reasons contributing to the SMS toll fraud verdict.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @return A list containing the enum numeric values on the wire for reasons.
   */
  @java.lang.Override
  public java.util.List<java.lang.Integer> getReasonsValueList() {
    return reasons_;
  }

  /**
   *
   *
   * <pre>
   * Output only. Reasons contributing to the SMS toll fraud verdict.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   *
   * @param index The index of the value to return.
   * @return The enum numeric value on the wire of reasons at the given index.
   */
  @java.lang.Override
  public int getReasonsValue(int index) {
    return reasons_.get(index);
  }

  // Byte size of the packed `reasons` payload; written by getSerializedSize()
  // and read by writeTo() (the runtime guarantees that call order).
  private int reasonsMemoizedSerializedSize;
  // Tri-state cache for isInitialized(): -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Must run first: it populates reasonsMemoizedSerializedSize used below.
    getSerializedSize();
    // proto3: scalar fields are skipped when they hold the default bit pattern.
    if (java.lang.Float.floatToRawIntBits(risk_) != 0) {
      output.writeFloat(1, risk_);
    }
    // Repeated enums are packed: tag 18 = field 2, wire type LENGTH_DELIMITED,
    // followed by the payload length, then the bare enum varints.
    if (getReasonsList().size() > 0) {
      output.writeUInt32NoTag(18);
      output.writeUInt32NoTag(reasonsMemoizedSerializedSize);
    }
    for (int i = 0; i < reasons_.size(); i++) {
      output.writeEnumNoTag(reasons_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 marks "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (java.lang.Float.floatToRawIntBits(risk_) != 0) {
      size += com.google.protobuf.CodedOutputStream.computeFloatSize(1, risk_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < reasons_.size(); i++) {
        dataSize += com.google.protobuf.CodedOutputStream.computeEnumSizeNoTag(reasons_.get(i));
      }
      size += dataSize;
      if (!getReasonsList().isEmpty()) {
        // 1 byte for the packed tag plus the varint length prefix.
        size += 1;
        size += com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(dataSize);
      }
      // Cached for writeTo(), which emits this exact packed payload length.
      reasonsMemoizedSerializedSize = dataSize;
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.recaptchaenterprise.v1.SmsTollFraudVerdict)) {
return super.equals(obj);
}
com.google.recaptchaenterprise.v1.SmsTollFraudVerdict other =
(com.google.recaptchaenterprise.v1.SmsTollFraudVerdict) obj;
if (java.lang.Float.floatToIntBits(getRisk())
!= java.lang.Float.floatToIntBits(other.getRisk())) return false;
if (!reasons_.equals(other.reasons_)) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" marker; a genuine 0 hash is recomputed.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + RISK_FIELD_NUMBER;
    // Bit-pattern hash keeps hashCode consistent with equals() above.
    hash = (53 * hash) + java.lang.Float.floatToIntBits(getRisk());
    if (getReasonsCount() > 0) {
      hash = (37 * hash) + REASONS_FIELD_NUMBER;
      hash = (53 * hash) + reasons_.hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. The byte[]/ByteString/ByteBuffer
  // overloads expect exactly one serialized message; the stream overloads read
  // to EOF, and the "Delimited" variants expect a varint length prefix.
  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Fresh builder with all fields at their defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the given message's field values.
  public static Builder newBuilder(
      com.google.recaptchaenterprise.v1.SmsTollFraudVerdict prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; merging it would be a no-op.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Information about SMS toll fraud.
   * </pre>
   *
   * Protobuf type {@code google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict)
      com.google.recaptchaenterprise.v1.SmsTollFraudVerdictOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_SmsTollFraudVerdict_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_SmsTollFraudVerdict_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.class,
              com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.Builder.class);
    }

    // Construct using com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to defaults. bitField0_ bit 0 tracks `risk`,
    // bit 1 tracks whether `reasons_` is a private mutable copy.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      risk_ = 0F;
      reasons_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_SmsTollFraudVerdict_descriptor;
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.SmsTollFraudVerdict getDefaultInstanceForType() {
      return com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.SmsTollFraudVerdict build() {
      com.google.recaptchaenterprise.v1.SmsTollFraudVerdict result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.recaptchaenterprise.v1.SmsTollFraudVerdict buildPartial() {
      com.google.recaptchaenterprise.v1.SmsTollFraudVerdict result =
          new com.google.recaptchaenterprise.v1.SmsTollFraudVerdict(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes the reasons list (once frozen, the builder must copy-on-write
    // again before further mutation — see ensureReasonsIsMutable()).
    private void buildPartialRepeatedFields(
        com.google.recaptchaenterprise.v1.SmsTollFraudVerdict result) {
      if (((bitField0_ & 0x00000002) != 0)) {
        reasons_ = java.util.Collections.unmodifiableList(reasons_);
        bitField0_ = (bitField0_ & ~0x00000002);
      }
      result.reasons_ = reasons_;
    }

    // Copies scalar fields whose presence bit is set.
    private void buildPartial0(com.google.recaptchaenterprise.v1.SmsTollFraudVerdict result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.risk_ = risk_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.recaptchaenterprise.v1.SmsTollFraudVerdict) {
        return mergeFrom((com.google.recaptchaenterprise.v1.SmsTollFraudVerdict) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto merge semantics: non-default scalars overwrite, repeated fields append.
    public Builder mergeFrom(com.google.recaptchaenterprise.v1.SmsTollFraudVerdict other) {
      if (other == com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.getDefaultInstance())
        return this;
      if (other.getRisk() != 0F) {
        setRisk(other.getRisk());
      }
      if (!other.reasons_.isEmpty()) {
        if (reasons_.isEmpty()) {
          // Adopt the other message's (immutable) list directly; the mutable
          // bit is cleared so a later mutation forces a copy.
          reasons_ = other.reasons_;
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          ensureReasonsIsMutable();
          reasons_.addAll(other.reasons_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming parse loop; unknown tags are preserved via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 13:
              {
                risk_ = input.readFloat();
                bitField0_ |= 0x00000001;
                break;
              } // case 13
            case 16:
              {
                // Unpacked encoding of field 2: one enum varint per tag.
                int tmpRaw = input.readEnum();
                ensureReasonsIsMutable();
                reasons_.add(tmpRaw);
                break;
              } // case 16
            case 18:
              {
                // Packed encoding of field 2: length-prefixed run of varints.
                int length = input.readRawVarint32();
                int oldLimit = input.pushLimit(length);
                while (input.getBytesUntilLimit() > 0) {
                  int tmpRaw = input.readEnum();
                  ensureReasonsIsMutable();
                  reasons_.add(tmpRaw);
                }
                input.popLimit(oldLimit);
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private float risk_;
    /**
     *
     *
     * <pre>
     * Output only. Probability of an SMS event being fraudulent.
     * Values are from 0.0 (lowest) to 1.0 (highest).
     * </pre>
     *
     * <code>float risk = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The risk.
     */
    @java.lang.Override
    public float getRisk() {
      return risk_;
    }

    /**
     *
     *
     * <pre>
     * Output only. Probability of an SMS event being fraudulent.
     * Values are from 0.0 (lowest) to 1.0 (highest).
     * </pre>
     *
     * <code>float risk = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The risk to set.
     * @return This builder for chaining.
     */
    public Builder setRisk(float value) {
      risk_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Probability of an SMS event being fraudulent.
     * Values are from 0.0 (lowest) to 1.0 (highest).
     * </pre>
     *
     * <code>float risk = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRisk() {
      bitField0_ = (bitField0_ & ~0x00000001);
      risk_ = 0F;
      onChanged();
      return this;
    }

    private java.util.List<java.lang.Integer> reasons_ = java.util.Collections.emptyList();

    // Copy-on-write guard: replaces a shared/immutable list with a private
    // ArrayList before any mutation, then marks it mutable via bit 1.
    private void ensureReasonsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        reasons_ = new java.util.ArrayList<java.lang.Integer>(reasons_);
        bitField0_ |= 0x00000002;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return A list containing the reasons.
     */
    public java.util.List<com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason>
        getReasonsList() {
      return new com.google.protobuf.Internal.ListAdapter<
          java.lang.Integer,
          com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason>(
          reasons_, reasons_converter_);
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The count of reasons.
     */
    public int getReasonsCount() {
      return reasons_.size();
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param index The index of the element to return.
     * @return The reasons at the given index.
     */
    public com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason getReasons(
        int index) {
      return reasons_converter_.convert(reasons_.get(index));
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param index The index to set the value at.
     * @param value The reasons to set.
     * @return This builder for chaining.
     */
    public Builder setReasons(
        int index, com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureReasonsIsMutable();
      reasons_.set(index, value.getNumber());
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The reasons to add.
     * @return This builder for chaining.
     */
    public Builder addReasons(
        com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureReasonsIsMutable();
      reasons_.add(value.getNumber());
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param values The reasons to add.
     * @return This builder for chaining.
     */
    public Builder addAllReasons(
        java.lang.Iterable<
                ? extends com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason>
            values) {
      ensureReasonsIsMutable();
      for (com.google.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason value :
          values) {
        reasons_.add(value.getNumber());
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearReasons() {
      reasons_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return A list containing the enum numeric values on the wire for reasons.
     */
    public java.util.List<java.lang.Integer> getReasonsValueList() {
      return java.util.Collections.unmodifiableList(reasons_);
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param index The index of the value to return.
     * @return The enum numeric value on the wire of reasons at the given index.
     */
    public int getReasonsValue(int index) {
      return reasons_.get(index);
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param index The index to set the value at.
     * @param value The enum numeric value on the wire for reasons to set.
     * @return This builder for chaining.
     */
    public Builder setReasonsValue(int index, int value) {
      ensureReasonsIsMutable();
      reasons_.set(index, value);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for reasons to add.
     * @return This builder for chaining.
     */
    public Builder addReasonsValue(int value) {
      ensureReasonsIsMutable();
      reasons_.add(value);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Reasons contributing to the SMS toll fraud verdict.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict.SmsTollFraudReason reasons = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param values The enum numeric values on the wire for reasons to add.
     * @return This builder for chaining.
     */
    public Builder addAllReasonsValue(java.lang.Iterable<java.lang.Integer> values) {
      ensureReasonsIsMutable();
      for (int value : values) {
        reasons_.add(value);
      }
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict)
  }
// @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1.SmsTollFraudVerdict)
private static final com.google.recaptchaenterprise.v1.SmsTollFraudVerdict DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1.SmsTollFraudVerdict();
}
public static com.google.recaptchaenterprise.v1.SmsTollFraudVerdict getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SmsTollFraudVerdict> PARSER =
new com.google.protobuf.AbstractParser<SmsTollFraudVerdict>() {
@java.lang.Override
public SmsTollFraudVerdict parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SmsTollFraudVerdict> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SmsTollFraudVerdict> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.recaptchaenterprise.v1.SmsTollFraudVerdict getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,768 | java-datalineage/proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/datacatalog/lineage/v1/CreateProcessRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/lineage/v1/lineage.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.lineage.v1;
/**
*
*
* <pre>
* Request message for
* [CreateProcess][google.cloud.datacatalog.lineage.v1.CreateProcess].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.CreateProcessRequest}
*/
public final class CreateProcessRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.CreateProcessRequest)
CreateProcessRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateProcessRequest.newBuilder() to construct.
private CreateProcessRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateProcessRequest() {
parent_ = "";
requestId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateProcessRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateProcessRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateProcessRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest.class,
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the project and its location that should own the
* process.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the project and its location that should own the
* process.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PROCESS_FIELD_NUMBER = 2;
private com.google.cloud.datacatalog.lineage.v1.Process process_;
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the process field is set.
*/
@java.lang.Override
public boolean hasProcess() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The process.
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.Process getProcess() {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder getProcessOrBuilder() {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
}
public static final int REQUEST_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getProcess());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, requestId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getProcess());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, requestId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest)) {
return super.equals(obj);
}
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest other =
(com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasProcess() != other.hasProcess()) return false;
if (hasProcess()) {
if (!getProcess().equals(other.getProcess())) return false;
}
if (!getRequestId().equals(other.getRequestId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasProcess()) {
hash = (37 * hash) + PROCESS_FIELD_NUMBER;
hash = (53 * hash) + getProcess().hashCode();
}
hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
hash = (53 * hash) + getRequestId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [CreateProcess][google.cloud.datacatalog.lineage.v1.CreateProcess].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.CreateProcessRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.CreateProcessRequest)
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateProcessRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateProcessRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest.class,
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getProcessFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
process_ = null;
if (processBuilder_ != null) {
processBuilder_.dispose();
processBuilder_ = null;
}
requestId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateProcessRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest
getDefaultInstanceForType() {
return com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest build() {
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest buildPartial() {
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest result =
new com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.process_ = processBuilder_ == null ? process_ : processBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.requestId_ = requestId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest) {
return mergeFrom((com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest other) {
if (other
== com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasProcess()) {
mergeProcess(other.getProcess());
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getProcessFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the project and its location that should own the
* process.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the project and its location that should own the
* process.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the project and its location that should own the
* process.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the project and its location that should own the
* process.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the project and its location that should own the
* process.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.datacatalog.lineage.v1.Process process_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
processBuilder_;
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the process field is set.
*/
public boolean hasProcess() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The process.
*/
public com.google.cloud.datacatalog.lineage.v1.Process getProcess() {
if (processBuilder_ == null) {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
} else {
return processBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setProcess(com.google.cloud.datacatalog.lineage.v1.Process value) {
if (processBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
process_ = value;
} else {
processBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setProcess(
com.google.cloud.datacatalog.lineage.v1.Process.Builder builderForValue) {
if (processBuilder_ == null) {
process_ = builderForValue.build();
} else {
processBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeProcess(com.google.cloud.datacatalog.lineage.v1.Process value) {
if (processBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& process_ != null
&& process_ != com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()) {
getProcessBuilder().mergeFrom(value);
} else {
process_ = value;
}
} else {
processBuilder_.mergeFrom(value);
}
if (process_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearProcess() {
bitField0_ = (bitField0_ & ~0x00000002);
process_ = null;
if (processBuilder_ != null) {
processBuilder_.dispose();
processBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.Process.Builder getProcessBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getProcessFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder getProcessOrBuilder() {
if (processBuilder_ != null) {
return processBuilder_.getMessageOrBuilder();
} else {
return process_ == null
? com.google.cloud.datacatalog.lineage.v1.Process.getDefaultInstance()
: process_;
}
}
/**
*
*
* <pre>
* Required. The process to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Process process = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>
getProcessFieldBuilder() {
if (processBuilder_ == null) {
processBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Process,
com.google.cloud.datacatalog.lineage.v1.Process.Builder,
com.google.cloud.datacatalog.lineage.v1.ProcessOrBuilder>(
getProcess(), getParentForChildren(), isClean());
process_ = null;
}
return processBuilder_;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.CreateProcessRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.CreateProcessRequest)
  // Singleton immutable default instance shared by all unset message references.
  private static final com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest();
  }
  public static com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure the partially-built message is attached to
  // the thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<CreateProcessRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateProcessRequest>() {
        @java.lang.Override
        public CreateProcessRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateProcessRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateProcessRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,804 | java-service-usage/proto-google-cloud-service-usage-v1beta1/src/main/java/com/google/api/serviceusage/v1beta1/BatchEnableServicesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/serviceusage/v1beta1/serviceusage.proto
// Protobuf Java Version: 3.25.8
package com.google.api.serviceusage.v1beta1;
/**
*
*
* <pre>
* Request message for the `BatchEnableServices` method.
* </pre>
*
* Protobuf type {@code google.api.serviceusage.v1beta1.BatchEnableServicesRequest}
*/
public final class BatchEnableServicesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.api.serviceusage.v1beta1.BatchEnableServicesRequest)
BatchEnableServicesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use BatchEnableServicesRequest.newBuilder() to construct.
  private BatchEnableServicesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance: fields get their
  // proto3 defaults (empty string / empty list).
  private BatchEnableServicesRequest() {
    parent_ = "";
    serviceIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchEnableServicesRequest();
  }
  // Reflection support: descriptor and field-accessor table generated into
  // ServiceUsageProto from serviceusage.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.api.serviceusage.v1beta1.ServiceUsageProto
        .internal_static_google_api_serviceusage_v1beta1_BatchEnableServicesRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.api.serviceusage.v1beta1.ServiceUsageProto
        .internal_static_google_api_serviceusage_v1beta1_BatchEnableServicesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest.class,
            com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   * <pre>
   * Parent to enable services on.
   *
   * An example name would be:
   * `projects/123`
   * where `123` is the project number (not project ID).
   *
   * The `BatchEnableServices` method currently only supports projects.
   * </pre>
   *
   * <code>string parent = 1;</code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    // parent_ lazily holds either a String or a ByteString; the first String
    // access decodes UTF-8 and caches the String back into the field.
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   * Bytes view of {@code string parent = 1}; see {@link #getParent()} for field semantics.
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SERVICE_IDS_FIELD_NUMBER = 2;
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList serviceIds_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
  /**
   * <pre>
   * The identifiers of the services to enable on the project.
   *
   * A valid identifier would be:
   * serviceusage.googleapis.com
   *
   * Enabling services requires that each service is public or is shared with
   * the user enabling the service.
   *
   * Two or more services must be specified. To enable a single service,
   * use the `EnableService` method instead.
   *
   * A single request can enable a maximum of 20 services at a time. If more
   * than 20 services are specified, the request will fail, and no state changes
   * will occur.
   * </pre>
   *
   * <code>repeated string service_ids = 2;</code>
   *
   * @return A list containing the serviceIds.
   */
  public com.google.protobuf.ProtocolStringList getServiceIdsList() {
    return serviceIds_;
  }
  /**
   * See {@link #getServiceIdsList()} for the {@code repeated string service_ids = 2} field docs.
   *
   * @return The count of serviceIds.
   */
  public int getServiceIdsCount() {
    return serviceIds_.size();
  }
  /**
   * See {@link #getServiceIdsList()} for the {@code repeated string service_ids = 2} field docs.
   *
   * @param index The index of the element to return.
   * @return The serviceIds at the given index.
   */
  public java.lang.String getServiceIds(int index) {
    return serviceIds_.get(index);
  }
  /**
   * See {@link #getServiceIdsList()} for the {@code repeated string service_ids = 2} field docs.
   *
   * @param index The index of the value to return.
   * @return The bytes of the serviceIds at the given index.
   */
  public com.google.protobuf.ByteString getServiceIdsBytes(int index) {
    return serviceIds_.getByteString(index);
  }
  // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: fields at their default (empty string / empty list)
    // are omitted from the wire format.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    for (int i = 0; i < serviceIds_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, serviceIds_.getRaw(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // cached from a previous call
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    {
      int dataSize = 0;
      for (int i = 0; i < serviceIds_.size(); i++) {
        dataSize += computeStringSizeNoTag(serviceIds_.getRaw(i));
      }
      size += dataSize;
      // One tag byte per repeated-string element (field number 2 fits in one byte).
      size += 1 * getServiceIdsList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest)) {
      return super.equals(obj);
    }
    com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest other =
        (com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest) obj;
    // Field-by-field comparison, including unknown fields carried on the message.
    if (!getParent().equals(other.getParent())) return false;
    if (!getServiceIdsList().equals(other.getServiceIdsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    // Repeated field only contributes when non-empty, mirroring wire presence.
    if (getServiceIdsCount() > 0) {
      hash = (37 * hash) + SERVICE_IDS_FIELD_NUMBER;
      hash = (53 * hash) + getServiceIdsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite.
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix first, for streams of messages.
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The shared default instance yields a fresh empty Builder; any other
    // message is copied into the new Builder via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for the `BatchEnableServices` method.
* </pre>
*
* Protobuf type {@code google.api.serviceusage.v1beta1.BatchEnableServicesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.api.serviceusage.v1beta1.BatchEnableServicesRequest)
com.google.api.serviceusage.v1beta1.BatchEnableServicesRequestOrBuilder {
    // Reflection support for the Builder, backed by the same descriptor tables
    // as the enclosing message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.api.serviceusage.v1beta1.ServiceUsageProto
          .internal_static_google_api_serviceusage_v1beta1_BatchEnableServicesRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.api.serviceusage.v1beta1.ServiceUsageProto
          .internal_static_google_api_serviceusage_v1beta1_BatchEnableServicesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest.class,
              com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest.Builder.class);
    }
    // Construct using com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset has-bits and both fields to their proto3 defaults.
      bitField0_ = 0;
      parent_ = "";
      serviceIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.api.serviceusage.v1beta1.ServiceUsageProto
          .internal_static_google_api_serviceusage_v1beta1_BatchEnableServicesRequest_descriptor;
    }
    @java.lang.Override
    public com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest
        getDefaultInstanceForType() {
      return com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest build() {
      com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest buildPartial() {
      com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest result =
          new com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest(this);
      // Only copy fields whose has-bits are set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each explicitly-set field from the Builder into the new message.
    private void buildPartial0(
        com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        // Freeze the list so the built message is immutable even if this
        // Builder keeps mutating afterwards (a later mutation re-copies it).
        serviceIds_.makeImmutable();
        result.serviceIds_ = serviceIds_;
      }
    }
    // The reflective field-manipulation overrides below are plain delegations to
    // GeneratedMessageV3.Builder, re-declared only to narrow the return type to Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest) {
        return mergeFrom((com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest) other);
      } else {
        // Different message type: fall back to reflective field-by-field merge.
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest other) {
      if (other
          == com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest.getDefaultInstance())
        return this;
      // Scalar fields: other's non-default value wins.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      // Repeated fields: other's elements are appended (proto merge semantics).
      if (!other.serviceIds_.isEmpty()) {
        if (serviceIds_.isEmpty()) {
          // Share other's (immutable) list directly; copied on first local mutation.
          serviceIds_ = other.serviceIds_;
          bitField0_ |= 0x00000002;
        } else {
          ensureServiceIdsIsMutable();
          serviceIds_.addAll(other.serviceIds_);
        }
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 message with no required fields: always initialized.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Tag = (field_number << 3) | wire_type; 10 = field 1 length-delimited,
          // 18 = field 2 length-delimited, 0 = end of stream.
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                java.lang.String s = input.readStringRequireUtf8();
                ensureServiceIdsIsMutable();
                serviceIds_.add(s);
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even when parsing aborts part-way.
        onChanged();
      } // finally
      return this;
    }
    // Has-bits: 0x1 = parent, 0x2 = serviceIds.
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     * <pre>
     * Parent to enable services on.
     *
     * An example name would be:
     * `projects/123`
     * where `123` is the project number (not project ID).
     *
     * The `BatchEnableServices` method currently only supports projects.
     * </pre>
     *
     * <code>string parent = 1;</code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      // Lazily decode and cache the String form, mirroring the message accessor.
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Bytes view of {@code string parent = 1}; see {@link #getParent()} for field semantics.
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets {@code string parent = 1}; see {@link #getParent()} for field semantics.
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Clears {@code string parent = 1} back to its proto3 default ("").
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     * Sets {@code string parent = 1} from raw bytes, rejecting non-UTF-8 input.
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.protobuf.LazyStringArrayList serviceIds_ =
        com.google.protobuf.LazyStringArrayList.emptyList();
    // Copy-on-write guard: replaces a shared/immutable list with a private
    // mutable copy before any mutation, and sets the has-bit.
    private void ensureServiceIdsIsMutable() {
      if (!serviceIds_.isModifiable()) {
        serviceIds_ = new com.google.protobuf.LazyStringArrayList(serviceIds_);
      }
      bitField0_ |= 0x00000002;
    }
    /**
     * <pre>
     * The identifiers of the services to enable on the project.
     *
     * A valid identifier would be:
     * serviceusage.googleapis.com
     *
     * Enabling services requires that each service is public or is shared with
     * the user enabling the service.
     *
     * Two or more services must be specified. To enable a single service,
     * use the `EnableService` method instead.
     *
     * A single request can enable a maximum of 20 services at a time. If more
     * than 20 services are specified, the request will fail, and no state changes
     * will occur.
     * </pre>
     *
     * <code>repeated string service_ids = 2;</code>
     *
     * @return A list containing the serviceIds.
     */
    public com.google.protobuf.ProtocolStringList getServiceIdsList() {
      // Freeze before exposing so callers cannot mutate the Builder's state;
      // the Builder re-copies on its own next mutation.
      serviceIds_.makeImmutable();
      return serviceIds_;
    }
    /**
     * See {@link #getServiceIdsList()} for the {@code repeated string service_ids = 2} field docs.
     *
     * @return The count of serviceIds.
     */
    public int getServiceIdsCount() {
      return serviceIds_.size();
    }
    /**
     * See {@link #getServiceIdsList()} for the {@code repeated string service_ids = 2} field docs.
     *
     * @param index The index of the element to return.
     * @return The serviceIds at the given index.
     */
    public java.lang.String getServiceIds(int index) {
      return serviceIds_.get(index);
    }
    /**
     * See {@link #getServiceIdsList()} for the {@code repeated string service_ids = 2} field docs.
     *
     * @param index The index of the value to return.
     * @return The bytes of the serviceIds at the given index.
     */
    public com.google.protobuf.ByteString getServiceIdsBytes(int index) {
      return serviceIds_.getByteString(index);
    }
    /**
     * Replaces the element at {@code index}; see {@link #getServiceIdsList()} for field docs.
     *
     * @param index The index to set the value at.
     * @param value The serviceIds to set.
     * @return This builder for chaining.
     */
    public Builder setServiceIds(int index, java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureServiceIdsIsMutable();
      serviceIds_.set(index, value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Appends one element; see {@link #getServiceIdsList()} for field docs.
     *
     * @param value The serviceIds to add.
     * @return This builder for chaining.
     */
    public Builder addServiceIds(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureServiceIdsIsMutable();
      serviceIds_.add(value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Appends all given elements; see {@link #getServiceIdsList()} for field docs.
     *
     * @param values The serviceIds to add.
     * @return This builder for chaining.
     */
    public Builder addAllServiceIds(java.lang.Iterable<java.lang.String> values) {
      ensureServiceIdsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(values, serviceIds_);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Clears {@code repeated string service_ids = 2} back to an empty list.
     *
     * @return This builder for chaining.
     */
    public Builder clearServiceIds() {
      serviceIds_ = com.google.protobuf.LazyStringArrayList.emptyList();
      bitField0_ = (bitField0_ & ~0x00000002);
      ;
      // (Stray empty statement above is emitted by protoc; kept to stay
      // byte-identical with generator output.)
      onChanged();
      return this;
    }
    /**
     * Appends one element from raw bytes, validating UTF-8; see
     * {@link #getServiceIdsList()} for field docs.
     *
     * @param value The bytes of the serviceIds to add.
     * @return This builder for chaining.
     */
    public Builder addServiceIdsBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      ensureServiceIdsIsMutable();
      serviceIds_.add(value);
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Plain delegations, declared final so generated subclasses cannot alter them.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.api.serviceusage.v1beta1.BatchEnableServicesRequest)
}
// @@protoc_insertion_point(class_scope:google.api.serviceusage.v1beta1.BatchEnableServicesRequest)
  // Singleton immutable default instance shared by all unset message references.
  private static final com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest();
  }
  public static com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any failure the partially-built message is attached to
  // the thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<BatchEnableServicesRequest> PARSER =
      new com.google.protobuf.AbstractParser<BatchEnableServicesRequest>() {
        @java.lang.Override
        public BatchEnableServicesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the message parser.
  public static com.google.protobuf.Parser<BatchEnableServicesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<BatchEnableServicesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.api.serviceusage.v1beta1.BatchEnableServicesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 37,219 | jdk/src/share/classes/java/time/chrono/AbstractChronology.java | /*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* This file is available under and governed by the GNU General Public
* License version 2 only, as published by the Free Software Foundation.
* However, the following notice accompanied the original version of this
* file:
*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time.chrono;
import static java.time.temporal.ChronoField.ALIGNED_DAY_OF_WEEK_IN_MONTH;
import static java.time.temporal.ChronoField.ALIGNED_DAY_OF_WEEK_IN_YEAR;
import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_MONTH;
import static java.time.temporal.ChronoField.ALIGNED_WEEK_OF_YEAR;
import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.DAY_OF_WEEK;
import static java.time.temporal.ChronoField.DAY_OF_YEAR;
import static java.time.temporal.ChronoField.EPOCH_DAY;
import static java.time.temporal.ChronoField.ERA;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
import static java.time.temporal.ChronoField.PROLEPTIC_MONTH;
import static java.time.temporal.ChronoField.YEAR;
import static java.time.temporal.ChronoField.YEAR_OF_ERA;
import static java.time.temporal.ChronoUnit.DAYS;
import static java.time.temporal.ChronoUnit.MONTHS;
import static java.time.temporal.ChronoUnit.WEEKS;
import static java.time.temporal.TemporalAdjusters.nextOrSame;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.ObjectStreamException;
import java.io.Serializable;
import java.time.DateTimeException;
import java.time.DayOfWeek;
import java.time.format.ResolverStyle;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAdjusters;
import java.time.temporal.TemporalField;
import java.time.temporal.ValueRange;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.ServiceLoader;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import sun.util.logging.PlatformLogger;
/**
* An abstract implementation of a calendar system, used to organize and identify dates.
* <p>
* The main date and time API is built on the ISO calendar system.
* The chronology operates behind the scenes to represent the general concept of a calendar system.
* <p>
* See {@link Chronology} for more details.
*
* @implSpec
* This class is separated from the {@code Chronology} interface so that the static methods
* are not inherited. While {@code Chronology} can be implemented directly, it is strongly
* recommended to extend this abstract class instead.
* <p>
* This class must be implemented with care to ensure other classes operate correctly.
* All implementations that can be instantiated must be final, immutable and thread-safe.
* Subclasses should be Serializable wherever possible.
*
* @since 1.8
*/
public abstract class AbstractChronology implements Chronology {
    /**
     * Comparator ordering {@code ChronoLocalDate} by epoch-day, ignoring the chronology.
     * The serializable lambda cast allows the comparator itself to be serialized.
     */
    static final Comparator<ChronoLocalDate> DATE_ORDER =
        (Comparator<ChronoLocalDate> & Serializable) (date1, date2) -> {
            return Long.compare(date1.toEpochDay(), date2.toEpochDay());
        };
    /**
     * Comparator ordering {@code ChronoLocalDateTime} by epoch-day of the date part,
     * then by nano-of-day of the time part.
     */
    static final Comparator<ChronoLocalDateTime<? extends ChronoLocalDate>> DATE_TIME_ORDER =
        (Comparator<ChronoLocalDateTime<? extends ChronoLocalDate>> & Serializable) (dateTime1, dateTime2) -> {
            int cmp = Long.compare(dateTime1.toLocalDate().toEpochDay(), dateTime2.toLocalDate().toEpochDay());
            if (cmp == 0) {
                cmp = Long.compare(dateTime1.toLocalTime().toNanoOfDay(), dateTime2.toLocalTime().toNanoOfDay());
            }
            return cmp;
        };
    /**
     * Comparator ordering {@code ChronoZonedDateTime} by instant: epoch-second,
     * then nano-of-second as a tie-breaker.
     */
    static final Comparator<ChronoZonedDateTime<?>> INSTANT_ORDER =
        (Comparator<ChronoZonedDateTime<?>> & Serializable) (dateTime1, dateTime2) -> {
            int cmp = Long.compare(dateTime1.toEpochSecond(), dateTime2.toEpochSecond());
            if (cmp == 0) {
                cmp = Long.compare(dateTime1.toLocalTime().getNano(), dateTime2.toLocalTime().getNano());
            }
            return cmp;
        };
    /**
     * Map of available calendars by ID; populated lazily by {@code initCache()}.
     */
    private static final ConcurrentHashMap<String, Chronology> CHRONOS_BY_ID = new ConcurrentHashMap<>();
    /**
     * Map of available calendars by calendar type; populated lazily by {@code initCache()}.
     */
    private static final ConcurrentHashMap<String, Chronology> CHRONOS_BY_TYPE = new ConcurrentHashMap<>();
/**
* Register a Chronology by its ID and type for lookup by {@link #of(String)}.
* Chronologies must not be registered until they are completely constructed.
* Specifically, not in the constructor of Chronology.
*
* @param chrono the chronology to register; not null
* @return the already registered Chronology if any, may be null
*/
    static Chronology registerChrono(Chronology chrono) {
        // Register under the chronology's own ID; the calendar type is registered too.
        return registerChrono(chrono, chrono.getId());
    }
/**
* Register a Chronology by ID and type for lookup by {@link #of(String)}.
* Chronos must not be registered until they are completely constructed.
* Specifically, not in the constructor of Chronology.
*
* @param chrono the chronology to register; not null
* @param id the ID to register the chronology; not null
* @return the already registered Chronology if any, may be null
*/
static Chronology registerChrono(Chronology chrono, String id) {
Chronology prev = CHRONOS_BY_ID.putIfAbsent(id, chrono);
if (prev == null) {
String type = chrono.getCalendarType();
if (type != null) {
CHRONOS_BY_TYPE.putIfAbsent(type, chrono);
}
}
return prev;
}
/**
* Initialization of the maps from id and type to Chronology.
* The ServiceLoader is used to find and register any implementations
* of {@link java.time.chrono.AbstractChronology} found in the bootclass loader.
* The built-in chronologies are registered explicitly.
* Calendars configured via the Thread's context classloader are local
* to that thread and are ignored.
* <p>
* The initialization is done only once using the registration
* of the IsoChronology as the test and the final step.
* Multiple threads may perform the initialization concurrently.
* Only the first registration of each Chronology is retained by the
* ConcurrentHashMap.
* @return true if the cache was initialized
*/
    private static boolean initCache() {
        // The "ISO" entry doubles as the initialization sentinel: it is registered last,
        // so its absence means initialization has not completed.
        if (CHRONOS_BY_ID.get("ISO") == null) {
            // Initialization is incomplete
            // Register built-in Chronologies
            registerChrono(HijrahChronology.INSTANCE);
            registerChrono(JapaneseChronology.INSTANCE);
            registerChrono(MinguoChronology.INSTANCE);
            registerChrono(ThaiBuddhistChronology.INSTANCE);
            // Register Chronologies from the ServiceLoader
            // (null class loader requests the bootstrap loader only)
            @SuppressWarnings("rawtypes")
            ServiceLoader<AbstractChronology> loader = ServiceLoader.load(AbstractChronology.class, null);
            for (AbstractChronology chrono : loader) {
                String id = chrono.getId();
                if (id.equals("ISO") || registerChrono(chrono) != null) {
                    // Log the attempt to replace an existing Chronology
                    PlatformLogger logger = PlatformLogger.getLogger("java.time.chrono");
                    logger.warning("Ignoring duplicate Chronology, from ServiceLoader configuration " + id);
                }
            }
            // finally, register IsoChronology to mark initialization is complete
            registerChrono(IsoChronology.INSTANCE);
            return true;
        }
        return false;
    }
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Chronology} from a locale.
* <p>
* See {@link Chronology#ofLocale(Locale)}.
*
* @param locale the locale to use to obtain the calendar system, not null
* @return the calendar system associated with the locale, not null
* @throws java.time.DateTimeException if the locale-specified calendar cannot be found
*/
    static Chronology ofLocale(Locale locale) {
        Objects.requireNonNull(locale, "locale");
        // "ca" is the BCP 47 Unicode locale extension key for the calendar algorithm.
        String type = locale.getUnicodeLocaleType("ca");
        if (type == null || "iso".equals(type) || "iso8601".equals(type)) {
            return IsoChronology.INSTANCE;
        }
        // Not pre-defined; lookup by the type
        // Loop runs at most twice: initCache() returns true only when it just
        // performed the initialization, triggering one retry of the lookup.
        do {
            Chronology chrono = CHRONOS_BY_TYPE.get(type);
            if (chrono != null) {
                return chrono;
            }
            // If not found, do the initialization (once) and repeat the lookup
        } while (initCache());
        // Look for a Chronology using ServiceLoader of the Thread's ContextClassLoader
        // Application provided Chronologies must not be cached
        @SuppressWarnings("rawtypes")
        ServiceLoader<Chronology> loader = ServiceLoader.load(Chronology.class);
        for (Chronology chrono : loader) {
            if (type.equals(chrono.getCalendarType())) {
                return chrono;
            }
        }
        throw new DateTimeException("Unknown calendar system: " + type);
    }
//-----------------------------------------------------------------------
/**
* Obtains an instance of {@code Chronology} from a chronology ID or
* calendar system type.
* <p>
* See {@link Chronology#of(String)}.
*
* @param id the chronology ID or calendar system type, not null
* @return the chronology with the identifier requested, not null
* @throws java.time.DateTimeException if the chronology cannot be found
*/
    static Chronology of(String id) {
        Objects.requireNonNull(id, "id");
        // Loop runs at most twice: initCache() returns true only when it just
        // performed the initialization, triggering one retry of the lookup.
        do {
            Chronology chrono = of0(id);
            if (chrono != null) {
                return chrono;
            }
            // If not found, do the initialization (once) and repeat the lookup
        } while (initCache());
        // Look for a Chronology using ServiceLoader of the Thread's ContextClassLoader
        // Application provided Chronologies must not be cached
        @SuppressWarnings("rawtypes")
        ServiceLoader<Chronology> loader = ServiceLoader.load(Chronology.class);
        for (Chronology chrono : loader) {
            if (id.equals(chrono.getId()) || id.equals(chrono.getCalendarType())) {
                return chrono;
            }
        }
        throw new DateTimeException("Unknown chronology: " + id);
    }
/**
* Obtains an instance of {@code Chronology} from a chronology ID or
* calendar system type.
*
* @param id the chronology ID or calendar system type, not null
* @return the chronology with the identifier requested, or {@code null} if not found
*/
private static Chronology of0(String id) {
Chronology chrono = CHRONOS_BY_ID.get(id);
if (chrono == null) {
chrono = CHRONOS_BY_TYPE.get(id);
}
return chrono;
}
/**
* Returns the available chronologies.
* <p>
* Each returned {@code Chronology} is available for use in the system.
* The set of chronologies includes the system chronologies and
* any chronologies provided by the application via ServiceLoader
* configuration.
*
     * @return the independent, modifiable set of the available chronologies, not null
*/
    static Set<Chronology> getAvailableChronologies() {
        initCache(); // force initialization
        HashSet<Chronology> chronos = new HashSet<>(CHRONOS_BY_ID.values());
        // Add in Chronologies from the ServiceLoader configuration.
        // Application-provided chronologies are discovered afresh on every call,
        // never added to the cache maps.
        @SuppressWarnings("rawtypes")
        ServiceLoader<Chronology> loader = ServiceLoader.load(Chronology.class);
        for (Chronology chrono : loader) {
            chronos.add(chrono);
        }
        return chronos;
    }
//-----------------------------------------------------------------------
/**
* Creates an instance.
*/
    protected AbstractChronology() {
        // No state to initialize; subclasses supply all chronology behavior.
    }
//-----------------------------------------------------------------------
/**
* Resolves parsed {@code ChronoField} values into a date during parsing.
* <p>
* Most {@code TemporalField} implementations are resolved using the
* resolve method on the field. By contrast, the {@code ChronoField} class
* defines fields that only have meaning relative to the chronology.
* As such, {@code ChronoField} date fields are resolved here in the
* context of a specific chronology.
* <p>
* {@code ChronoField} instances are resolved by this method, which may
* be overridden in subclasses.
* <ul>
* <li>{@code EPOCH_DAY} - If present, this is converted to a date and
* all other date fields are then cross-checked against the date.
* <li>{@code PROLEPTIC_MONTH} - If present, then it is split into the
* {@code YEAR} and {@code MONTH_OF_YEAR}. If the mode is strict or smart
* then the field is validated.
* <li>{@code YEAR_OF_ERA} and {@code ERA} - If both are present, then they
* are combined to form a {@code YEAR}. In lenient mode, the {@code YEAR_OF_ERA}
* range is not validated, in smart and strict mode it is. The {@code ERA} is
* validated for range in all three modes. If only the {@code YEAR_OF_ERA} is
* present, and the mode is smart or lenient, then the last available era
* is assumed. In strict mode, no era is assumed and the {@code YEAR_OF_ERA} is
* left untouched. If only the {@code ERA} is present, then it is left untouched.
* <li>{@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} -
* If all three are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is smart or strict, then the month and day are validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the first month in the requested year,
* then adding the difference in months, then the difference in days.
* If the mode is smart, and the day-of-month is greater than the maximum for
* the year-month, then the day-of-month is adjusted to the last day-of-month.
* If the mode is strict, then the three fields must form a valid date.
* <li>{@code YEAR} and {@code DAY_OF_YEAR} -
* If both are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the requested year, then adding
* the difference in days.
* If the mode is smart or strict, then the two fields must form a valid date.
* <li>{@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH} and
* {@code ALIGNED_DAY_OF_WEEK_IN_MONTH} -
* If all four are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the first month in the requested year, then adding
* the difference in months, then the difference in weeks, then in days.
* If the mode is smart or strict, then the all four fields are validated to
* their outer ranges. The date is then combined in a manner equivalent to
* creating a date on the first day of the requested year and month, then adding
* the amount in weeks and days to reach their values. If the mode is strict,
* the date is additionally validated to check that the day and week adjustment
* did not change the month.
* <li>{@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH} and
* {@code DAY_OF_WEEK} - If all four are present, then they are combined to
* form a date. The approach is the same as described above for
* years, months and weeks in {@code ALIGNED_DAY_OF_WEEK_IN_MONTH}.
* The day-of-week is adjusted as the next or same matching day-of-week once
* the years, months and weeks have been handled.
* <li>{@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and {@code ALIGNED_DAY_OF_WEEK_IN_YEAR} -
* If all three are present, then they are combined to form a date.
* In all three modes, the {@code YEAR} is validated.
* If the mode is lenient, then the date is combined in a manner equivalent to
* creating a date on the first day of the requested year, then adding
* the difference in weeks, then in days.
* If the mode is smart or strict, then the all three fields are validated to
* their outer ranges. The date is then combined in a manner equivalent to
* creating a date on the first day of the requested year, then adding
* the amount in weeks and days to reach their values. If the mode is strict,
* the date is additionally validated to check that the day and week adjustment
* did not change the year.
* <li>{@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and {@code DAY_OF_WEEK} -
* If all three are present, then they are combined to form a date.
* The approach is the same as described above for years and weeks in
* {@code ALIGNED_DAY_OF_WEEK_IN_YEAR}. The day-of-week is adjusted as the
* next or same matching day-of-week once the years and weeks have been handled.
* </ul>
* <p>
* The default implementation is suitable for most calendar systems.
* If {@link java.time.temporal.ChronoField#YEAR_OF_ERA} is found without an {@link java.time.temporal.ChronoField#ERA}
* then the last era in {@link #eras()} is used.
* The implementation assumes a 7 day week, that the first day-of-month
* has the value 1, that first day-of-year has the value 1, and that the
* first of the month and year always exists.
*
* @param fieldValues the map of fields to values, which can be updated, not null
* @param resolverStyle the requested type of resolve, not null
* @return the resolved date, null if insufficient information to create a date
* @throws java.time.DateTimeException if the date cannot be resolved, typically
* because of a conflict in the input data
*/
    @Override
    public ChronoLocalDate resolveDate(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        // check epoch-day before inventing era
        if (fieldValues.containsKey(EPOCH_DAY)) {
            return dateEpochDay(fieldValues.remove(EPOCH_DAY));
        }
        // fix proleptic month before inventing era
        resolveProlepticMonth(fieldValues, resolverStyle);
        // invent era if necessary to resolve year-of-era
        ChronoLocalDate resolved = resolveYearOfEra(fieldValues, resolverStyle);
        if (resolved != null) {
            return resolved;
        }
        // build date — the most specific field combinations are tried first
        if (fieldValues.containsKey(YEAR)) {
            if (fieldValues.containsKey(MONTH_OF_YEAR)) {
                if (fieldValues.containsKey(DAY_OF_MONTH)) {
                    return resolveYMD(fieldValues, resolverStyle);
                }
                if (fieldValues.containsKey(ALIGNED_WEEK_OF_MONTH)) {
                    if (fieldValues.containsKey(ALIGNED_DAY_OF_WEEK_IN_MONTH)) {
                        return resolveYMAA(fieldValues, resolverStyle);
                    }
                    if (fieldValues.containsKey(DAY_OF_WEEK)) {
                        return resolveYMAD(fieldValues, resolverStyle);
                    }
                }
            }
            if (fieldValues.containsKey(DAY_OF_YEAR)) {
                return resolveYD(fieldValues, resolverStyle);
            }
            if (fieldValues.containsKey(ALIGNED_WEEK_OF_YEAR)) {
                if (fieldValues.containsKey(ALIGNED_DAY_OF_WEEK_IN_YEAR)) {
                    return resolveYAA(fieldValues, resolverStyle);
                }
                if (fieldValues.containsKey(DAY_OF_WEEK)) {
                    return resolveYAD(fieldValues, resolverStyle);
                }
            }
        }
        // insufficient information to form a date
        return null;
    }
    /**
     * Resolves {@code PROLEPTIC_MONTH}, if present, into {@code YEAR} and
     * {@code MONTH_OF_YEAR}, removing it from the map. The value is range-checked
     * unless the style is lenient.
     */
    void resolveProlepticMonth(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        Long pMonth = fieldValues.remove(PROLEPTIC_MONTH);
        if (pMonth != null) {
            if (resolverStyle != ResolverStyle.LENIENT) {
                PROLEPTIC_MONTH.checkValidValue(pMonth);
            }
            // first day-of-month is likely to be safest for setting proleptic-month
            // cannot add to year zero, as not all chronologies have a year zero
            ChronoLocalDate chronoDate = dateNow()
                    .with(DAY_OF_MONTH, 1).with(PROLEPTIC_MONTH, pMonth);
            addFieldValue(fieldValues, MONTH_OF_YEAR, chronoDate.get(MONTH_OF_YEAR));
            addFieldValue(fieldValues, YEAR, chronoDate.get(YEAR));
        }
    }
    /**
     * Resolves {@code YEAR_OF_ERA} (and {@code ERA}, when present) into a proleptic
     * {@code YEAR}. Both fields are removed from the map when consumed; in strict
     * mode with no era and no year, {@code YEAR_OF_ERA} is put back untouched.
     * Always returns null — resolution happens by mutating {@code fieldValues}.
     */
    ChronoLocalDate resolveYearOfEra(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        Long yoeLong = fieldValues.remove(YEAR_OF_ERA);
        if (yoeLong != null) {
            Long eraLong = fieldValues.remove(ERA);
            int yoe;
            if (resolverStyle != ResolverStyle.LENIENT) {
                yoe = range(YEAR_OF_ERA).checkValidIntValue(yoeLong, YEAR_OF_ERA);
            } else {
                yoe = Math.toIntExact(yoeLong);
            }
            if (eraLong != null) {
                // Era given explicitly: validate it and combine with year-of-era.
                Era eraObj = eraOf(range(ERA).checkValidIntValue(eraLong, ERA));
                addFieldValue(fieldValues, YEAR, prolepticYear(eraObj, yoe));
            } else {
                if (fieldValues.containsKey(YEAR)) {
                    // A proleptic year is present: borrow its era to cross-check year-of-era.
                    int year = range(YEAR).checkValidIntValue(fieldValues.get(YEAR), YEAR);
                    ChronoLocalDate chronoDate = dateYearDay(year, 1);
                    addFieldValue(fieldValues, YEAR, prolepticYear(chronoDate.getEra(), yoe));
                } else if (resolverStyle == ResolverStyle.STRICT) {
                    // do not invent era if strict
                    // reinstate the field removed earlier, no cross-check issues
                    fieldValues.put(YEAR_OF_ERA, yoeLong);
                } else {
                    // Smart/lenient with no era: assume the last (most recent) era.
                    List<Era> eras = eras();
                    if (eras.isEmpty()) {
                        addFieldValue(fieldValues, YEAR, yoe);
                    } else {
                        Era eraObj = eras.get(eras.size() - 1);
                        addFieldValue(fieldValues, YEAR, prolepticYear(eraObj, yoe));
                    }
                }
            }
        } else if (fieldValues.containsKey(ERA)) {
            range(ERA).checkValidValue(fieldValues.get(ERA), ERA); // always validated
        }
        return null;
    }
    /**
     * Resolves {@code YEAR}, {@code MONTH_OF_YEAR} and {@code DAY_OF_MONTH} into a date,
     * removing all three fields from the map. The year is always validated.
     */
    ChronoLocalDate resolveYMD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
        if (resolverStyle == ResolverStyle.LENIENT) {
            // Lenient: start at the first day of the first month and add the excess.
            long months = Math.subtractExact(fieldValues.remove(MONTH_OF_YEAR), 1);
            long days = Math.subtractExact(fieldValues.remove(DAY_OF_MONTH), 1);
            return date(y, 1, 1).plus(months, MONTHS).plus(days, DAYS);
        }
        int moy = range(MONTH_OF_YEAR).checkValidIntValue(fieldValues.remove(MONTH_OF_YEAR), MONTH_OF_YEAR);
        ValueRange domRange = range(DAY_OF_MONTH);
        int dom = domRange.checkValidIntValue(fieldValues.remove(DAY_OF_MONTH), DAY_OF_MONTH);
        if (resolverStyle == ResolverStyle.SMART) { // previous valid
            try {
                return date(y, moy, dom);
            } catch (DateTimeException ex) {
                // Smart: a day-of-month beyond the month's end snaps to the last valid day.
                return date(y, moy, 1).with(TemporalAdjusters.lastDayOfMonth());
            }
        }
        return date(y, moy, dom);
    }
ChronoLocalDate resolveYD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
if (resolverStyle == ResolverStyle.LENIENT) {
long days = Math.subtractExact(fieldValues.remove(DAY_OF_YEAR), 1);
return dateYearDay(y, 1).plus(days, DAYS);
}
int doy = range(DAY_OF_YEAR).checkValidIntValue(fieldValues.remove(DAY_OF_YEAR), DAY_OF_YEAR);
return dateYearDay(y, doy); // smart is same as strict
}
    /**
     * Resolves {@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH} and
     * {@code ALIGNED_DAY_OF_WEEK_IN_MONTH} into a date, removing all four fields.
     * In strict mode the result must stay within the requested month.
     */
    ChronoLocalDate resolveYMAA(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
        if (resolverStyle == ResolverStyle.LENIENT) {
            long months = Math.subtractExact(fieldValues.remove(MONTH_OF_YEAR), 1);
            long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), 1);
            long days = Math.subtractExact(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_MONTH), 1);
            return date(y, 1, 1).plus(months, MONTHS).plus(weeks, WEEKS).plus(days, DAYS);
        }
        int moy = range(MONTH_OF_YEAR).checkValidIntValue(fieldValues.remove(MONTH_OF_YEAR), MONTH_OF_YEAR);
        int aw = range(ALIGNED_WEEK_OF_MONTH).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), ALIGNED_WEEK_OF_MONTH);
        int ad = range(ALIGNED_DAY_OF_WEEK_IN_MONTH).checkValidIntValue(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_MONTH), ALIGNED_DAY_OF_WEEK_IN_MONTH);
        // Aligned weeks assume a 7-day week starting on the first of the month.
        ChronoLocalDate date = date(y, moy, 1).plus((aw - 1) * 7 + (ad - 1), DAYS);
        if (resolverStyle == ResolverStyle.STRICT && date.get(MONTH_OF_YEAR) != moy) {
            throw new DateTimeException("Strict mode rejected resolved date as it is in a different month");
        }
        return date;
    }
    /**
     * Resolves {@code YEAR}, {@code MONTH_OF_YEAR}, {@code ALIGNED_WEEK_OF_MONTH} and
     * {@code DAY_OF_WEEK} into a date, removing all four fields. The day-of-week is
     * applied as a next-or-same adjustment after positioning the aligned week.
     * In strict mode the result must stay within the requested month.
     */
    ChronoLocalDate resolveYMAD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
        if (resolverStyle == ResolverStyle.LENIENT) {
            long months = Math.subtractExact(fieldValues.remove(MONTH_OF_YEAR), 1);
            long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), 1);
            long dow = Math.subtractExact(fieldValues.remove(DAY_OF_WEEK), 1);
            return resolveAligned(date(y, 1, 1), months, weeks, dow);
        }
        int moy = range(MONTH_OF_YEAR).checkValidIntValue(fieldValues.remove(MONTH_OF_YEAR), MONTH_OF_YEAR);
        int aw = range(ALIGNED_WEEK_OF_MONTH).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_MONTH), ALIGNED_WEEK_OF_MONTH);
        int dow = range(DAY_OF_WEEK).checkValidIntValue(fieldValues.remove(DAY_OF_WEEK), DAY_OF_WEEK);
        ChronoLocalDate date = date(y, moy, 1).plus((aw - 1) * 7, DAYS).with(nextOrSame(DayOfWeek.of(dow)));
        if (resolverStyle == ResolverStyle.STRICT && date.get(MONTH_OF_YEAR) != moy) {
            throw new DateTimeException("Strict mode rejected resolved date as it is in a different month");
        }
        return date;
    }
    /**
     * Resolves {@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and
     * {@code ALIGNED_DAY_OF_WEEK_IN_YEAR} into a date, removing all three fields.
     * In strict mode the result must stay within the requested year.
     */
    ChronoLocalDate resolveYAA(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
        if (resolverStyle == ResolverStyle.LENIENT) {
            long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), 1);
            long days = Math.subtractExact(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_YEAR), 1);
            return dateYearDay(y, 1).plus(weeks, WEEKS).plus(days, DAYS);
        }
        int aw = range(ALIGNED_WEEK_OF_YEAR).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), ALIGNED_WEEK_OF_YEAR);
        int ad = range(ALIGNED_DAY_OF_WEEK_IN_YEAR).checkValidIntValue(fieldValues.remove(ALIGNED_DAY_OF_WEEK_IN_YEAR), ALIGNED_DAY_OF_WEEK_IN_YEAR);
        // Aligned weeks assume a 7-day week starting on the first day of the year.
        ChronoLocalDate date = dateYearDay(y, 1).plus((aw - 1) * 7 + (ad - 1), DAYS);
        if (resolverStyle == ResolverStyle.STRICT && date.get(YEAR) != y) {
            throw new DateTimeException("Strict mode rejected resolved date as it is in a different year");
        }
        return date;
    }
    /**
     * Resolves {@code YEAR}, {@code ALIGNED_WEEK_OF_YEAR} and {@code DAY_OF_WEEK}
     * into a date, removing all three fields. The day-of-week is applied as a
     * next-or-same adjustment after positioning the aligned week.
     * In strict mode the result must stay within the requested year.
     */
    ChronoLocalDate resolveYAD(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
        int y = range(YEAR).checkValidIntValue(fieldValues.remove(YEAR), YEAR);
        if (resolverStyle == ResolverStyle.LENIENT) {
            long weeks = Math.subtractExact(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), 1);
            long dow = Math.subtractExact(fieldValues.remove(DAY_OF_WEEK), 1);
            return resolveAligned(dateYearDay(y, 1), 0, weeks, dow);
        }
        int aw = range(ALIGNED_WEEK_OF_YEAR).checkValidIntValue(fieldValues.remove(ALIGNED_WEEK_OF_YEAR), ALIGNED_WEEK_OF_YEAR);
        int dow = range(DAY_OF_WEEK).checkValidIntValue(fieldValues.remove(DAY_OF_WEEK), DAY_OF_WEEK);
        ChronoLocalDate date = dateYearDay(y, 1).plus((aw - 1) * 7, DAYS).with(nextOrSame(DayOfWeek.of(dow)));
        if (resolverStyle == ResolverStyle.STRICT && date.get(YEAR) != y) {
            throw new DateTimeException("Strict mode rejected resolved date as it is in a different year");
        }
        return date;
    }
    /**
     * Combines a base date with month/week offsets and a day-of-week for lenient
     * resolution. The (zero-based-shifted) day-of-week may lie outside 1-7;
     * out-of-range values are normalized by moving whole weeks first, so the final
     * {@code nextOrSame} adjustment advances at most six days.
     */
    ChronoLocalDate resolveAligned(ChronoLocalDate base, long months, long weeks, long dow) {
        ChronoLocalDate date = base.plus(months, MONTHS).plus(weeks, WEEKS);
        if (dow > 7) {
            // Move forward by whole weeks, leaving dow in the range 1-7.
            date = date.plus((dow - 1) / 7, WEEKS);
            dow = ((dow - 1) % 7) + 1;
        } else if (dow < 1) {
            // Move backward by whole weeks, leaving dow in the range 1-7.
            date = date.plus(Math.subtractExact(dow, 7) / 7, WEEKS);
            dow = ((dow + 6) % 7) + 1;
        }
        return date.with(nextOrSame(DayOfWeek.of((int) dow)));
    }
/**
* Adds a field-value pair to the map, checking for conflicts.
* <p>
* If the field is not already present, then the field-value pair is added to the map.
* If the field is already present and it has the same value as that specified, no action occurs.
* If the field is already present and it has a different value to that specified, then
* an exception is thrown.
*
* @param field the field to add, not null
* @param value the value to add, not null
* @throws java.time.DateTimeException if the field is already present with a different value
*/
void addFieldValue(Map<TemporalField, Long> fieldValues, ChronoField field, long value) {
Long old = fieldValues.get(field); // check first for better error message
if (old != null && old.longValue() != value) {
throw new DateTimeException("Conflict found: " + field + " " + old + " differs from " + field + " " + value);
}
fieldValues.put(field, value);
}
//-----------------------------------------------------------------------
/**
* Compares this chronology to another chronology.
* <p>
* The comparison order first by the chronology ID string, then by any
* additional information specific to the subclass.
* It is "consistent with equals", as defined by {@link Comparable}.
*
* @implSpec
* This implementation compares the chronology ID.
* Subclasses must compare any additional state that they store.
*
* @param other the other chronology to compare to, not null
* @return the comparator value, negative if less, positive if greater
*/
    @Override
    public int compareTo(Chronology other) {
        // Natural ordering is by chronology ID; subclasses append any extra state.
        return getId().compareTo(other.getId());
    }
/**
* Checks if this chronology is equal to another chronology.
* <p>
* The comparison is based on the entire state of the object.
*
* @implSpec
* This implementation checks the type and calls
* {@link #compareTo(java.time.chrono.Chronology)}.
*
* @param obj the object to check, null returns false
* @return true if this is equal to the other chronology
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof AbstractChronology) {
return compareTo((AbstractChronology) obj) == 0;
}
return false;
}
/**
* A hash code for this chronology.
* <p>
* The hash code should be based on the entire state of the object.
*
* @implSpec
* This implementation is based on the chronology ID and class.
* Subclasses should add any additional state that they store.
*
* @return a suitable hash code
*/
@Override
public int hashCode() {
return getClass().hashCode() ^ getId().hashCode();
}
//-----------------------------------------------------------------------
/**
* Outputs this chronology as a {@code String}, using the chronology ID.
*
* @return a string representation of this chronology, not null
*/
@Override
public String toString() {
return getId();
}
//-----------------------------------------------------------------------
    /**
     * Writes the Chronology using a
     * <a href="../../../serialized-form.html#java.time.chrono.Ser">dedicated serialized form</a>.
     * <pre>
     *  out.writeByte(1);  // identifies this as a Chronology
     *  out.writeUTF(getId());
     * </pre>
     * <p>
     * The returned delegate is written in place of this object, so no
     * instance state beyond the ID is ever serialized directly.
     *
     * @return the instance of {@code Ser}, not null
     */
    Object writeReplace() {
        return new Ser(Ser.CHRONO_TYPE, this);
    }
    /**
     * Defend against malicious streams.
     * <p>
     * Always fails: instances must be deserialized through the serialization
     * delegate produced by {@code writeReplace}, never directly.
     *
     * @param s  the stream being read, ignored
     * @throws java.io.InvalidObjectException always
     */
    private void readObject(ObjectInputStream s) throws ObjectStreamException {
        throw new InvalidObjectException("Deserialization via serialization delegate");
    }
    /**
     * Writes this chronology's ID to the stream; the inverse of
     * {@code readExternal}.
     *
     * @param out  the stream to write to, not null
     * @throws IOException if an error occurs while writing
     */
    void writeExternal(DataOutput out) throws IOException {
        out.writeUTF(getId());
    }
static Chronology readExternal(DataInput in) throws IOException {
String id = in.readUTF();
return Chronology.of(id);
}
}
|
apache/james-project | 36,769 | backends-common/rabbitmq/src/test/java/org/apache/james/backends/rabbitmq/RabbitMQConfigurationTest.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.backends.rabbitmq;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.DEFAULT_MANAGEMENT_CREDENTIAL;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.DEFAULT_PASSWORD_STRING;
import static org.apache.james.backends.rabbitmq.RabbitMQFixture.DEFAULT_USER;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.time.Duration;
import java.util.Arrays;
import java.util.Optional;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import org.apache.commons.configuration2.ex.ConversionException;
import org.apache.james.util.Host;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import com.google.common.collect.ImmutableList;
import nl.jqno.equalsverifier.EqualsVerifier;
class RabbitMQConfigurationTest {
@Test
void shouldRespectBeanContract() {
EqualsVerifier.forClass(RabbitMQConfiguration.class).verify();
}
@Test
void fromShouldThrowWhenURIIsNotInTheConfiguration() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the URI of RabbitMQ");
}
@Test
void fromShouldThrowWhenURIIsNull() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", null);
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the URI of RabbitMQ");
}
@Test
void fromShouldThrowWhenURIIsEmpty() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "");
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the URI of RabbitMQ");
}
@Test
void fromShouldThrowWhenURIIsInvalid() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", ":invalid");
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify a valid URI");
}
@Test
void fromShouldThrowWhenManagementURIIsNotInTheConfiguration() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the management URI of RabbitMQ");
}
@Test
void fromShouldThrowWhenManagementURIIsNull() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", null);
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the management URI of RabbitMQ");
}
@Test
void fromShouldThrowWhenManagementURIIsEmpty() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", "");
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the management URI of RabbitMQ");
}
@Test
void fromShouldThrowWhenManagementURIIsInvalid() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", ":invalid");
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify a valid URI");
}
@Test
void fromShouldReturnTheConfigurationWhenRequiredParametersAreGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
assertThat(RabbitMQConfiguration.from(configuration))
.isEqualTo(RabbitMQConfiguration.builder()
.amqpUri(URI.create(amqpUri))
.managementUri(URI.create(managementUri))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.hosts(ImmutableList.of(Host.from("rabbitmqhost", 5672)))
.build());
}
@Test
void fromShouldThrowWhenManagementCredentialsAreNotGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the management.user property as username of rabbitmq management admin account");
}
@Test
void fromShouldReturnCustomValueWhenManagementCredentialsAreGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
String user = "james";
configuration.addProperty("management.user", user);
String passwordString = "james_password";
configuration.addProperty("management.password", passwordString);
RabbitMQConfiguration.ManagementCredentials credentials = new RabbitMQConfiguration.ManagementCredentials(
user, passwordString.toCharArray());
assertThat(RabbitMQConfiguration.from(configuration).getManagementCredentials())
.isEqualTo(credentials);
}
@Test
void maxRetriesShouldEqualsDefaultValueWhenNotGiven() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getMaxRetries())
.isEqualTo(RabbitMQConfiguration.Builder.DEFAULT_MAX_RETRIES);
}
@Test
void maxRetriesShouldEqualsCustomValueWhenGiven() throws URISyntaxException {
int maxRetries = 1;
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.maxRetries(maxRetries)
.build();
assertThat(rabbitMQConfiguration.getMaxRetries())
.isEqualTo(maxRetries);
}
@Test
void minDelayShouldEqualsDefaultValueWhenNotGiven() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getMinDelayInMs())
.isEqualTo(RabbitMQConfiguration.Builder.DEFAULT_MIN_DELAY);
}
@Test
void minDelayShouldEqualsCustomValueWhenGiven() throws URISyntaxException {
int minDelay = 1;
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.minDelayInMs(minDelay)
.build();
assertThat(rabbitMQConfiguration.getMinDelayInMs())
.isEqualTo(minDelay);
}
@Test
void connectionTimeoutShouldEqualsDefaultValueWhenNotGiven() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getConnectionTimeoutInMs())
.isEqualTo(RabbitMQConfiguration.Builder.DEFAULT_CONNECTION_TIMEOUT);
}
@Test
void connectionTimeoutShouldEqualsCustomValueWhenGiven() throws URISyntaxException {
int connectionTimeout = 1;
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.connectionTimeoutInMs(connectionTimeout)
.build();
assertThat(rabbitMQConfiguration.getConnectionTimeoutInMs())
.isEqualTo(connectionTimeout);
}
@Test
void channelRpcTimeoutShouldEqualsDefaultValueWhenNotGiven() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getChannelRpcTimeoutInMs())
.isEqualTo(RabbitMQConfiguration.Builder.DEFAULT_CHANNEL_RPC_TIMEOUT);
}
@Test
void channelRpcTimeoutShouldEqualsCustomValueWhenGiven() throws URISyntaxException {
int channelRpcTimeout = 1;
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.channelRpcTimeoutInMs(channelRpcTimeout)
.build();
assertThat(rabbitMQConfiguration.getChannelRpcTimeoutInMs())
.isEqualTo(channelRpcTimeout);
}
@Test
void handshakeTimeoutShouldEqualsDefaultValueWhenNotGiven() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getHandshakeTimeoutInMs())
.isEqualTo(RabbitMQConfiguration.Builder.DEFAULT_HANDSHAKE_TIMEOUT);
}
@Test
void handshakeTimeoutShouldEqualsCustomValueWhenGiven() throws URISyntaxException {
int handshakeTimeout = 1;
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.handshakeTimeoutInMs(handshakeTimeout)
.build();
assertThat(rabbitMQConfiguration.getHandshakeTimeoutInMs())
.isEqualTo(handshakeTimeout);
}
@Test
void shutdownTimeoutShouldEqualsDefaultValueWhenNotGiven() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getShutdownTimeoutInMs())
.isEqualTo(RabbitMQConfiguration.Builder.DEFAULT_SHUTDOWN_TIMEOUT);
}
@Test
void shutdownTimeoutShouldEqualsCustomValueWhenGiven() throws URISyntaxException {
int shutdownTimeout = 1;
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.shutdownTimeoutInMs(shutdownTimeout)
.build();
assertThat(rabbitMQConfiguration.getShutdownTimeoutInMs())
.isEqualTo(shutdownTimeout);
}
@Test
void sslConfigurationShouldHaveDefaultWhenNotSpecifiedOtherwise() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getSslConfiguration().getHostNameVerifier())
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.HostNameVerifier.DEFAULT);
assertThat(rabbitMQConfiguration.getSslConfiguration().getStrategy())
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.DEFAULT);
assertThat(rabbitMQConfiguration.getSslConfiguration().getTrustStore())
.isEmpty();
assertThat(rabbitMQConfiguration.getSslConfiguration().getKeyStore())
.isEmpty();
}
@Test
void sslConfigurationShouldHaveCustomValuesIfUseInConfiguration() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.sslConfiguration(
RabbitMQConfiguration.SSLConfiguration.builder()
.strategyOverride(RabbitMQConfiguration.SSLConfiguration.SSLTrustStore.of("src/test/resources/test-truststore-password-password", "password"))
.acceptAnyHostNameVerifier()
.sslKeyStore(Optional.of(RabbitMQConfiguration.SSLConfiguration.SSLKeyStore.of("src/test/resources/test-keystore-password-password", "password")))
.build()
)
.build();
assertThat(rabbitMQConfiguration.getSslConfiguration().getHostNameVerifier())
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.HostNameVerifier.ACCEPT_ANY_HOSTNAME);
assertThat(rabbitMQConfiguration.getSslConfiguration().getStrategy())
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.OVERRIDE);
assertThat(rabbitMQConfiguration.getSslConfiguration().getTrustStore())
.isNotEmpty();
assertThat(rabbitMQConfiguration.getSslConfiguration().getKeyStore())
.isNotEmpty();
}
@Test
void queueTTLShouldEqualsDefaultValueWhenNotGiven() throws URISyntaxException {
RabbitMQConfiguration rabbitMQConfiguration = RabbitMQConfiguration.builder()
.amqpUri(new URI("amqp://james:james@rabbitmqhost:5672"))
.managementUri(new URI("http://james:james@rabbitmqhost:15672/api/"))
.managementCredentials(DEFAULT_MANAGEMENT_CREDENTIAL)
.build();
assertThat(rabbitMQConfiguration.getQueueTTL())
.isEqualTo(Optional.empty());
}
@Test
void fromShouldReturnCustomQueueTTLValueWhenGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("notification.queue.ttl", 99999);
assertThat(RabbitMQConfiguration.from(configuration).getQueueTTL())
.isEqualTo(Optional.of(99999L));
}
@ParameterizedTest
@ValueSource(longs = {0, -1})
void fromShouldThrowWhenQueueTTLIsNotPositive(long ttl) {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("notification.queue.ttl", ttl);
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(IllegalArgumentException.class)
.hasMessage("'notification.queue.ttl' must be strictly positive");
}
@Test
void fromShouldThrowWhenQueueTTLIsNotANumber() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("notification.queue.ttl", "notnumber");
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(ConversionException.class);
}
@Test
void emptyTaskQueueConsumerTimeoutShouldDefaultToOneDay() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
assertThat(RabbitMQConfiguration.from(configuration).getTaskQueueConsumerTimeout())
.isEqualTo(Duration.ofDays(1));
}
@Test
void parseValidTaskQueueConsumerTimeoutShouldSucceed() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("task.queue.consumer.timeout", "2day");
assertThat(RabbitMQConfiguration.from(configuration).getTaskQueueConsumerTimeout())
.isEqualTo(Duration.ofDays(2));
}
@Test
void parseTaskQueueConsumerTimeoutWithoutTimeUnitShouldDefaultToSecond() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("task.queue.consumer.timeout", "3600");
assertThat(RabbitMQConfiguration.from(configuration).getTaskQueueConsumerTimeout())
.isEqualTo(Duration.ofSeconds(3600));
}
@Test
void parseInvalidTaskQueueConsumerTimeoutShouldFail() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("task.queue.consumer.timeout", "invalid");
assertThatThrownBy(() -> RabbitMQConfiguration.from(configuration))
.isInstanceOf(NumberFormatException.class);
}
@Test
void fromShouldReturnEmptyVhostValueByDefault() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
assertThat(RabbitMQConfiguration.from(configuration).getVhost())
.isEqualTo(Optional.empty());
}
@Test
void fromShouldReturnVhostValueWhenGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("vhost", "test");
assertThat(RabbitMQConfiguration.from(configuration).getVhost())
.isEqualTo(Optional.of("test"));
}
@Test
void fromShouldReturnVhostValueWhenDeclaredInURI() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672/test";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
assertThat(RabbitMQConfiguration.from(configuration).getVhost())
.isEqualTo(Optional.of("test"));
}
@Test
void fromShouldReturnVhostValueWhenGivenAndNotUriOne() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672/test";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("vhost", "vhosttest");
assertThat(RabbitMQConfiguration.from(configuration).getVhost())
.isEqualTo(Optional.of("vhosttest"));
}
@Test
void eventBusPropagateDispatchErrorShouldBeTrueByDefault() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
assertThat(RabbitMQConfiguration.from(configuration).eventBusPropagateDispatchError())
.isTrue();
}
@Test
void eventBusPropagateDispatchErrorShouldBeDisabledWhenConfiguredFalse() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.addProperty("uri", "amqp://james:james@rabbitmqhost:5672");
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("event.bus.propagate.dispatch.error", "false");
assertThat(RabbitMQConfiguration.from(configuration).eventBusPropagateDispatchError())
.isFalse();
}
@Nested
class ManagementCredentialsTest {
@Test
void managementCredentialShouldRespectBeanContract() {
EqualsVerifier.forClass(RabbitMQConfiguration.ManagementCredentials.class)
.verify();
}
@Test
void fromShouldThrowWhenUserAndPasswordAreNotGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
assertThatThrownBy(() -> RabbitMQConfiguration.ManagementCredentials.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the management.user property as username of rabbitmq management admin account");
}
@Test
void fromShouldThrowWhenUserIsNotGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String passwordString = "password";
configuration.addProperty("management.password", passwordString);
assertThatThrownBy(() -> RabbitMQConfiguration.ManagementCredentials.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the management.user property as username of rabbitmq management admin account");
}
@Test
void fromShouldThrowWhenPasswordIsNotGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String userString = "guest";
configuration.addProperty("management.user", userString);
assertThatThrownBy(() -> RabbitMQConfiguration.ManagementCredentials.from(configuration))
.isInstanceOf(IllegalStateException.class)
.hasMessage("You need to specify the management.password property as password of rabbitmq management admin account");
}
@Test
void fromShouldReturnCorrespondingCredentialWhenGiven() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String userString = "guest";
configuration.addProperty("management.user", userString);
String passwordString = "password";
configuration.addProperty("management.password", passwordString);
RabbitMQConfiguration.ManagementCredentials credentialWithUserAndPassword = new RabbitMQConfiguration.ManagementCredentials(
userString, passwordString.toCharArray());
assertThat(RabbitMQConfiguration.ManagementCredentials.from(configuration))
.isEqualTo(credentialWithUserAndPassword);
}
}
@Test
void hostsShouldDefaultToAmqpUriIfNotSpecified() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setListDelimiterHandler(new DefaultListDelimiterHandler(','));
String amqpUri = "amqp://james:james@rabbitmqhost1:5672";
configuration.addProperty("uri", amqpUri);
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
assertThat(RabbitMQConfiguration.from(configuration).rabbitMQHosts())
.containsOnly(Host.from("rabbitmqhost1", 5672));
}
@Test
void hostsShouldParseIfSpecified() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
configuration.setListDelimiterHandler(new DefaultListDelimiterHandler(','));
String amqpUri = "amqp://james:james@rabbitmqhost1:5672";
configuration.addProperty("uri", amqpUri);
configuration.addProperty("management.uri", "http://james:james@rabbitmqhost:15672/api/");
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("hosts", "rabbitmqhost1:5672,rabbitmqhost2:5672");
assertThat(RabbitMQConfiguration.from(configuration).rabbitMQHosts())
.containsExactlyInAnyOrder(Host.from("rabbitmqhost1", 5672),
Host.from("rabbitmqhost2", 5672));
}
@Test
void shouldReturnQuorumQueueReplicationFactorWhenConfigured() {
PropertiesConfiguration configuration = new PropertiesConfiguration();
String amqpUri = "amqp://james:james@rabbitmqhost:5672";
configuration.addProperty("uri", amqpUri);
String managementUri = "http://james:james@rabbitmqhost:15672/api/";
configuration.addProperty("management.uri", managementUri);
configuration.addProperty("management.user", DEFAULT_USER);
configuration.addProperty("management.password", DEFAULT_PASSWORD_STRING);
configuration.addProperty("quorum.queues.replication.factor", 3);
assertThat(RabbitMQConfiguration.from(configuration).getQuorumQueueReplicationFactor())
.isEqualTo(3);
}
@Nested
class SSLConfigurationTest {
@Nested
class SSLValidationStrategyTest {
@Test
void fromShouldThrowExceptionWhenUnknownStrategyNameSupplied() {
assertThatThrownBy(() -> RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.from("random"))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void fromShouldReturnWhenCorrectNamesAreUsed() {
assertThat(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.from("default"))
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.DEFAULT);
assertThat(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.from("override"))
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.OVERRIDE);
assertThat(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.from("ignore"))
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.SSLValidationStrategy.IGNORE);
}
}
@Nested
class HostNameVerifierTest {
@Test
void fromShouldThrowExceptionWhenUnknownStrategyNameSupplied() {
assertThatThrownBy(() -> RabbitMQConfiguration.SSLConfiguration.HostNameVerifier.from("random"))
.isInstanceOf(IllegalArgumentException.class);
}
@Test
void fromShouldReturnWhenCorrectNamesAreUsed() {
assertThat(RabbitMQConfiguration.SSLConfiguration.HostNameVerifier.from("default"))
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.HostNameVerifier.DEFAULT);
assertThat(RabbitMQConfiguration.SSLConfiguration.HostNameVerifier.from("accept_any_hostname"))
.isEqualTo(RabbitMQConfiguration.SSLConfiguration.HostNameVerifier.ACCEPT_ANY_HOSTNAME);
}
}
@Nested
class SSLTrustStoreTest {
    @Test
    void ofShouldThrowExceptionWhenFilePathNotSupplied() {
        // A null trust-store path is a programming error, not a config error.
        String anyPassword = "password";
        assertThatThrownBy(() -> RabbitMQConfiguration.SSLConfiguration.SSLTrustStore.of(null, anyPassword))
            .isInstanceOf(NullPointerException.class);
    }

    @Test
    void ofShouldThrowExceptionWhenPasswordNotSupplied() {
        // A null password is likewise rejected with an NPE.
        String anyPath = "/path/to/file";
        assertThatThrownBy(() -> RabbitMQConfiguration.SSLConfiguration.SSLTrustStore.of(anyPath, null))
            .isInstanceOf(NullPointerException.class);
    }

    @Test
    void ofShouldThrowExceptionWhenFileDoesNotExist() {
        // A path that points at nothing on disk is an invalid argument.
        String missingPath = "/does/not/exist";
        assertThatThrownBy(() -> RabbitMQConfiguration.SSLConfiguration.SSLTrustStore.of(missingPath, "password"))
            .isInstanceOf(IllegalArgumentException.class);
    }

    @Test
    void ofShouldReturnWhenCorrectAttributesUsed() throws IOException {
        File trustStoreFile = File.createTempFile("for-james-test", "");
        trustStoreFile.deleteOnExit();

        RabbitMQConfiguration.SSLConfiguration.SSLTrustStore trustStore =
            RabbitMQConfiguration.SSLConfiguration.SSLTrustStore.of(trustStoreFile.getAbsolutePath(), "password");

        // Both the resolved file and the password chars must round-trip.
        assertThat(trustStore)
            .matches(store -> store.getFile().equals(trustStoreFile))
            .matches(store -> Arrays.equals(store.getPassword(), "password".toCharArray()));
    }
}
}
}
|
googleapis/google-cloud-java | 36,755 | java-maps-routeoptimization/proto-google-maps-routeoptimization-v1/src/main/java/com/google/maps/routeoptimization/v1/Waypoint.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/maps/routeoptimization/v1/route_optimization_service.proto
// Protobuf Java Version: 3.25.8
package com.google.maps.routeoptimization.v1;
/**
*
*
* <pre>
* Encapsulates a waypoint. Waypoints mark arrival and departure locations of
* VisitRequests, and start and end locations of Vehicles.
* </pre>
*
* Protobuf type {@code google.maps.routeoptimization.v1.Waypoint}
*/
public final class Waypoint extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.maps.routeoptimization.v1.Waypoint)
WaypointOrBuilder {
private static final long serialVersionUID = 0L;
// Use Waypoint.newBuilder() to construct.
private Waypoint(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Waypoint() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Waypoint();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.maps.routeoptimization.v1.RouteOptimizationServiceProto
.internal_static_google_maps_routeoptimization_v1_Waypoint_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.maps.routeoptimization.v1.RouteOptimizationServiceProto
.internal_static_google_maps_routeoptimization_v1_Waypoint_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.maps.routeoptimization.v1.Waypoint.class,
com.google.maps.routeoptimization.v1.Waypoint.Builder.class);
}
private int locationTypeCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object locationType_;
/**
 * Identifies which case of the {@code location_type} oneof is set.
 * Generated for every proto3 oneof; the numeric values mirror the field
 * numbers in the .proto definition (0 means "no case set").
 */
public enum LocationTypeCase
    implements
        com.google.protobuf.Internal.EnumLite,
        com.google.protobuf.AbstractMessage.InternalOneOfEnum {
  LOCATION(1),
  PLACE_ID(2),
  // Sentinel for "neither oneof field is populated".
  LOCATIONTYPE_NOT_SET(0);
  private final int value;

  private LocationTypeCase(int value) {
    this.value = value;
  }

  /**
   * @param value The number of the enum to look for.
   * @return The enum associated with the given number.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static LocationTypeCase valueOf(int value) {
    return forNumber(value);
  }

  /**
   * Maps a oneof case number to its enum constant.
   *
   * @return the matching case, or {@code null} for an unknown number
   *     (unlike {@code valueOf}, no exception is thrown).
   */
  public static LocationTypeCase forNumber(int value) {
    switch (value) {
      case 1:
        return LOCATION;
      case 2:
        return PLACE_ID;
      case 0:
        return LOCATIONTYPE_NOT_SET;
      default:
        return null;
    }
  }

  /** Returns the oneof field number backing this case (0 if unset). */
  public int getNumber() {
    return this.value;
  }
};
public LocationTypeCase getLocationTypeCase() {
return LocationTypeCase.forNumber(locationTypeCase_);
}
public static final int LOCATION_FIELD_NUMBER = 1;
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*
* @return Whether the location field is set.
*/
@java.lang.Override
public boolean hasLocation() {
return locationTypeCase_ == 1;
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*
* @return The location.
*/
@java.lang.Override
public com.google.maps.routeoptimization.v1.Location getLocation() {
if (locationTypeCase_ == 1) {
return (com.google.maps.routeoptimization.v1.Location) locationType_;
}
return com.google.maps.routeoptimization.v1.Location.getDefaultInstance();
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
@java.lang.Override
public com.google.maps.routeoptimization.v1.LocationOrBuilder getLocationOrBuilder() {
if (locationTypeCase_ == 1) {
return (com.google.maps.routeoptimization.v1.Location) locationType_;
}
return com.google.maps.routeoptimization.v1.Location.getDefaultInstance();
}
public static final int PLACE_ID_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @return Whether the placeId field is set.
*/
public boolean hasPlaceId() {
return locationTypeCase_ == 2;
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @return The placeId.
*/
public java.lang.String getPlaceId() {
  // Default to the empty string when the oneof holds a different case.
  java.lang.Object ref = "";
  if (locationTypeCase_ == 2) {
    ref = locationType_;
  }
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // The field was parsed as raw bytes; decode it and cache the decoded
    // String back into the oneof slot so later calls skip the UTF-8 work.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (locationTypeCase_ == 2) {
      locationType_ = s;
    }
    return s;
  }
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @return The bytes for placeId.
*/
public com.google.protobuf.ByteString getPlaceIdBytes() {
  // Default to the empty string when the oneof holds a different case.
  java.lang.Object ref = "";
  if (locationTypeCase_ == 2) {
    ref = locationType_;
  }
  if (ref instanceof java.lang.String) {
    // Mirror of getPlaceId(): encode the cached String once and store the
    // ByteString back into the oneof slot for subsequent byte accesses.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    if (locationTypeCase_ == 2) {
      locationType_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int SIDE_OF_ROAD_FIELD_NUMBER = 3;
private boolean sideOfRoad_ = false;
/**
*
*
* <pre>
* Optional. Indicates that the location of this waypoint is meant to have a
* preference for the vehicle to stop at a particular side of road. When you
* set this value, the route will pass through the location so that the
* vehicle can stop at the side of road that the location is biased towards
* from the center of the road. This option doesn't work for the 'WALKING'
* travel mode.
* </pre>
*
* <code>bool side_of_road = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The sideOfRoad.
*/
@java.lang.Override
public boolean getSideOfRoad() {
return sideOfRoad_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Emit whichever location_type oneof case is set (field 1 or 2).
  if (locationTypeCase_ == 1) {
    output.writeMessage(1, (com.google.maps.routeoptimization.v1.Location) locationType_);
  }
  if (locationTypeCase_ == 2) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, locationType_);
  }
  // Scalar fields are only serialized when non-default (proto3 semantics).
  if (sideOfRoad_ != false) {
    output.writeBool(3, sideOfRoad_);
  }
  // Preserve fields this binary does not know about.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 marks "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;

  // Must mirror writeTo() exactly: only set/non-default fields contribute.
  size = 0;
  if (locationTypeCase_ == 1) {
    size +=
        com.google.protobuf.CodedOutputStream.computeMessageSize(
            1, (com.google.maps.routeoptimization.v1.Location) locationType_);
  }
  if (locationTypeCase_ == 2) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, locationType_);
  }
  if (sideOfRoad_ != false) {
    size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, sideOfRoad_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.maps.routeoptimization.v1.Waypoint)) {
    return super.equals(obj);
  }
  com.google.maps.routeoptimization.v1.Waypoint other =
      (com.google.maps.routeoptimization.v1.Waypoint) obj;
  if (getSideOfRoad() != other.getSideOfRoad()) return false;
  // Both messages must have the same oneof case before comparing its value.
  if (!getLocationTypeCase().equals(other.getLocationTypeCase())) return false;
  switch (locationTypeCase_) {
    case 1:
      if (!getLocation().equals(other.getLocation())) return false;
      break;
    case 2:
      if (!getPlaceId().equals(other.getPlaceId())) return false;
      break;
    case 0:
    default:
      // No oneof case set — nothing further to compare.
  }
  // Unknown fields participate in equality as well.
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" marker for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Mix each populated field as (field number, value) pairs, matching equals().
  hash = (37 * hash) + SIDE_OF_ROAD_FIELD_NUMBER;
  hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSideOfRoad());
  switch (locationTypeCase_) {
    case 1:
      hash = (37 * hash) + LOCATION_FIELD_NUMBER;
      hash = (53 * hash) + getLocation().hashCode();
      break;
    case 2:
      hash = (37 * hash) + PLACE_ID_FIELD_NUMBER;
      hash = (53 * hash) + getPlaceId().hashCode();
      break;
    case 0:
    default:
      // Oneof not set — contributes nothing.
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.maps.routeoptimization.v1.Waypoint parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.maps.routeoptimization.v1.Waypoint prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Encapsulates a waypoint. Waypoints mark arrival and departure locations of
* VisitRequests, and start and end locations of Vehicles.
* </pre>
*
* Protobuf type {@code google.maps.routeoptimization.v1.Waypoint}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.maps.routeoptimization.v1.Waypoint)
com.google.maps.routeoptimization.v1.WaypointOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.maps.routeoptimization.v1.RouteOptimizationServiceProto
.internal_static_google_maps_routeoptimization_v1_Waypoint_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.maps.routeoptimization.v1.RouteOptimizationServiceProto
.internal_static_google_maps_routeoptimization_v1_Waypoint_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.maps.routeoptimization.v1.Waypoint.class,
com.google.maps.routeoptimization.v1.Waypoint.Builder.class);
}
// Construct using com.google.maps.routeoptimization.v1.Waypoint.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (locationBuilder_ != null) {
locationBuilder_.clear();
}
sideOfRoad_ = false;
locationTypeCase_ = 0;
locationType_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.maps.routeoptimization.v1.RouteOptimizationServiceProto
.internal_static_google_maps_routeoptimization_v1_Waypoint_descriptor;
}
@java.lang.Override
public com.google.maps.routeoptimization.v1.Waypoint getDefaultInstanceForType() {
return com.google.maps.routeoptimization.v1.Waypoint.getDefaultInstance();
}
@java.lang.Override
public com.google.maps.routeoptimization.v1.Waypoint build() {
com.google.maps.routeoptimization.v1.Waypoint result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.maps.routeoptimization.v1.Waypoint buildPartial() {
com.google.maps.routeoptimization.v1.Waypoint result =
new com.google.maps.routeoptimization.v1.Waypoint(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.maps.routeoptimization.v1.Waypoint result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.sideOfRoad_ = sideOfRoad_;
}
}
private void buildPartialOneofs(com.google.maps.routeoptimization.v1.Waypoint result) {
result.locationTypeCase_ = locationTypeCase_;
result.locationType_ = this.locationType_;
if (locationTypeCase_ == 1 && locationBuilder_ != null) {
result.locationType_ = locationBuilder_.build();
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.maps.routeoptimization.v1.Waypoint) {
return mergeFrom((com.google.maps.routeoptimization.v1.Waypoint) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.maps.routeoptimization.v1.Waypoint other) {
  // Merging the default instance is a no-op.
  if (other == com.google.maps.routeoptimization.v1.Waypoint.getDefaultInstance()) return this;
  // Scalar: only overwrite when the incoming value is non-default.
  if (other.getSideOfRoad() != false) {
    setSideOfRoad(other.getSideOfRoad());
  }
  // Oneof: the incoming case wins; messages are merged recursively,
  // strings are copied by reference.
  switch (other.getLocationTypeCase()) {
    case LOCATION:
      {
        mergeLocation(other.getLocation());
        break;
      }
    case PLACE_ID:
      {
        locationTypeCase_ = 2;
        locationType_ = other.locationType_;
        onChanged();
        break;
      }
    case LOCATIONTYPE_NOT_SET:
      {
        // Other message has no oneof case set — keep ours as-is.
        break;
      }
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    // Wire-format parse loop: each case value is (field_number << 3) | wire_type.
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 signals end of the stream/message.
          done = true;
          break;
        case 10:
          {
            // Field 1 (location), length-delimited: parse into the sub-builder.
            input.readMessage(getLocationFieldBuilder().getBuilder(), extensionRegistry);
            locationTypeCase_ = 1;
            break;
          } // case 10
        case 18:
          {
            // Field 2 (place_id), length-delimited: UTF-8 validity is enforced.
            java.lang.String s = input.readStringRequireUtf8();
            locationTypeCase_ = 2;
            locationType_ = s;
            break;
          } // case 18
        case 24:
          {
            // Field 3 (side_of_road), varint bool.
            sideOfRoad_ = input.readBool();
            bitField0_ |= 0x00000004;
            break;
          } // case 24
        default:
          {
            // Unknown fields are retained rather than dropped.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on partial parses.
    onChanged();
  } // finally
  return this;
}
private int locationTypeCase_ = 0;
private java.lang.Object locationType_;
public LocationTypeCase getLocationTypeCase() {
return LocationTypeCase.forNumber(locationTypeCase_);
}
public Builder clearLocationType() {
locationTypeCase_ = 0;
locationType_ = null;
onChanged();
return this;
}
private int bitField0_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.maps.routeoptimization.v1.Location,
com.google.maps.routeoptimization.v1.Location.Builder,
com.google.maps.routeoptimization.v1.LocationOrBuilder>
locationBuilder_;
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*
* @return Whether the location field is set.
*/
@java.lang.Override
public boolean hasLocation() {
return locationTypeCase_ == 1;
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*
* @return The location.
*/
@java.lang.Override
public com.google.maps.routeoptimization.v1.Location getLocation() {
if (locationBuilder_ == null) {
if (locationTypeCase_ == 1) {
return (com.google.maps.routeoptimization.v1.Location) locationType_;
}
return com.google.maps.routeoptimization.v1.Location.getDefaultInstance();
} else {
if (locationTypeCase_ == 1) {
return locationBuilder_.getMessage();
}
return com.google.maps.routeoptimization.v1.Location.getDefaultInstance();
}
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
public Builder setLocation(com.google.maps.routeoptimization.v1.Location value) {
if (locationBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
locationType_ = value;
onChanged();
} else {
locationBuilder_.setMessage(value);
}
locationTypeCase_ = 1;
return this;
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
public Builder setLocation(
com.google.maps.routeoptimization.v1.Location.Builder builderForValue) {
if (locationBuilder_ == null) {
locationType_ = builderForValue.build();
onChanged();
} else {
locationBuilder_.setMessage(builderForValue.build());
}
locationTypeCase_ = 1;
return this;
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
public Builder mergeLocation(com.google.maps.routeoptimization.v1.Location value) {
  if (locationBuilder_ == null) {
    // Merge field-by-field only if this case already holds a real message;
    // the reference comparison against the shared default instance is the
    // generated code's cheap "is it just the default?" check.
    if (locationTypeCase_ == 1
        && locationType_
            != com.google.maps.routeoptimization.v1.Location.getDefaultInstance()) {
      locationType_ =
          com.google.maps.routeoptimization.v1.Location.newBuilder(
                  (com.google.maps.routeoptimization.v1.Location) locationType_)
              .mergeFrom(value)
              .buildPartial();
    } else {
      // Case not set (or only the default): adopt the incoming message wholesale.
      locationType_ = value;
    }
    onChanged();
  } else {
    // A field builder exists; delegate merge/set to it.
    if (locationTypeCase_ == 1) {
      locationBuilder_.mergeFrom(value);
    } else {
      locationBuilder_.setMessage(value);
    }
  }
  // Either way, the oneof now holds the location case.
  locationTypeCase_ = 1;
  return this;
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
public Builder clearLocation() {
if (locationBuilder_ == null) {
if (locationTypeCase_ == 1) {
locationTypeCase_ = 0;
locationType_ = null;
onChanged();
}
} else {
if (locationTypeCase_ == 1) {
locationTypeCase_ = 0;
locationType_ = null;
}
locationBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
public com.google.maps.routeoptimization.v1.Location.Builder getLocationBuilder() {
return getLocationFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
@java.lang.Override
public com.google.maps.routeoptimization.v1.LocationOrBuilder getLocationOrBuilder() {
if ((locationTypeCase_ == 1) && (locationBuilder_ != null)) {
return locationBuilder_.getMessageOrBuilder();
} else {
if (locationTypeCase_ == 1) {
return (com.google.maps.routeoptimization.v1.Location) locationType_;
}
return com.google.maps.routeoptimization.v1.Location.getDefaultInstance();
}
}
/**
*
*
* <pre>
* A point specified using geographic coordinates, including an optional
* heading.
* </pre>
*
* <code>.google.maps.routeoptimization.v1.Location location = 1;</code>
*/
// Lazily creates the single-field builder for the location oneof case.
// NOTE: calling this has side effects — it forces locationTypeCase_ to 1
// and fires onChanged(), so it must only be used when the caller intends
// to select/populate the location case.
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.maps.routeoptimization.v1.Location,
        com.google.maps.routeoptimization.v1.Location.Builder,
        com.google.maps.routeoptimization.v1.LocationOrBuilder>
    getLocationFieldBuilder() {
  if (locationBuilder_ == null) {
    // Seed the builder with the current value, or the default instance
    // when the oneof currently holds a different case.
    if (!(locationTypeCase_ == 1)) {
      locationType_ = com.google.maps.routeoptimization.v1.Location.getDefaultInstance();
    }
    locationBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.maps.routeoptimization.v1.Location,
            com.google.maps.routeoptimization.v1.Location.Builder,
            com.google.maps.routeoptimization.v1.LocationOrBuilder>(
            (com.google.maps.routeoptimization.v1.Location) locationType_,
            getParentForChildren(),
            isClean());
    // Ownership of the message moves into the builder.
    locationType_ = null;
  }
  locationTypeCase_ = 1;
  onChanged();
  return locationBuilder_;
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @return Whether the placeId field is set.
*/
@java.lang.Override
public boolean hasPlaceId() {
return locationTypeCase_ == 2;
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @return The placeId.
*/
@java.lang.Override
public java.lang.String getPlaceId() {
java.lang.Object ref = "";
if (locationTypeCase_ == 2) {
ref = locationType_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (locationTypeCase_ == 2) {
locationType_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @return The bytes for placeId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPlaceIdBytes() {
java.lang.Object ref = "";
if (locationTypeCase_ == 2) {
ref = locationType_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (locationTypeCase_ == 2) {
locationType_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @param value The placeId to set.
* @return This builder for chaining.
*/
public Builder setPlaceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
locationTypeCase_ = 2;
locationType_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPlaceId() {
if (locationTypeCase_ == 2) {
locationTypeCase_ = 0;
locationType_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The POI Place ID associated with the waypoint.
* </pre>
*
* <code>string place_id = 2;</code>
*
* @param value The bytes for placeId to set.
* @return This builder for chaining.
*/
public Builder setPlaceIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
locationTypeCase_ = 2;
locationType_ = value;
onChanged();
return this;
}
private boolean sideOfRoad_;
/**
*
*
* <pre>
* Optional. Indicates that the location of this waypoint is meant to have a
* preference for the vehicle to stop at a particular side of road. When you
* set this value, the route will pass through the location so that the
* vehicle can stop at the side of road that the location is biased towards
* from the center of the road. This option doesn't work for the 'WALKING'
* travel mode.
* </pre>
*
* <code>bool side_of_road = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The sideOfRoad.
*/
@java.lang.Override
public boolean getSideOfRoad() {
return sideOfRoad_;
}
/**
*
*
* <pre>
* Optional. Indicates that the location of this waypoint is meant to have a
* preference for the vehicle to stop at a particular side of road. When you
* set this value, the route will pass through the location so that the
* vehicle can stop at the side of road that the location is biased towards
* from the center of the road. This option doesn't work for the 'WALKING'
* travel mode.
* </pre>
*
* <code>bool side_of_road = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The sideOfRoad to set.
* @return This builder for chaining.
*/
public Builder setSideOfRoad(boolean value) {
sideOfRoad_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Indicates that the location of this waypoint is meant to have a
* preference for the vehicle to stop at a particular side of road. When you
* set this value, the route will pass through the location so that the
* vehicle can stop at the side of road that the location is biased towards
* from the center of the road. This option doesn't work for the 'WALKING'
* travel mode.
* </pre>
*
* <code>bool side_of_road = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearSideOfRoad() {
bitField0_ = (bitField0_ & ~0x00000004);
sideOfRoad_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.maps.routeoptimization.v1.Waypoint)
}
// @@protoc_insertion_point(class_scope:google.maps.routeoptimization.v1.Waypoint)
private static final com.google.maps.routeoptimization.v1.Waypoint DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.maps.routeoptimization.v1.Waypoint();
}
public static com.google.maps.routeoptimization.v1.Waypoint getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Waypoint> PARSER =
new com.google.protobuf.AbstractParser<Waypoint>() {
@java.lang.Override
public Waypoint parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<Waypoint> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Waypoint> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.maps.routeoptimization.v1.Waypoint getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,796 | java-biglake/proto-google-cloud-biglake-v1alpha1/src/main/java/com/google/cloud/bigquery/biglake/v1alpha1/ListTablesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1alpha1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1alpha1;
/**
*
*
* <pre>
* Response message for the ListTables method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse}
*/
public final class ListTablesResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse)
    ListTablesResponseOrBuilder {
  // NOTE: protoc-generated from google/cloud/bigquery/biglake/v1alpha1/metastore.proto.
  // Do not hand-edit; regenerate from the .proto instead, or edits will be lost.
  private static final long serialVersionUID = 0L;
  // Use ListTablesResponse.newBuilder() to construct.
  private ListTablesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default (no-arg) construction: empty repeated field, empty page token.
  private ListTablesResponse() {
    tables_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListTablesResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
        .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.class,
            com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.Builder.class);
  }
  public static final int TABLES_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Table> tables_;
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Table> getTablesList() {
    return tables_;
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>
      getTablesOrBuilderList() {
    return tables_;
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
   */
  @java.lang.Override
  public int getTablesCount() {
    return tables_.size();
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.Table getTables(int index) {
    return tables_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The tables from the specified database.
   * </pre>
   *
   * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTablesOrBuilder(int index) {
    return tables_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; decoded lazily and cached (see accessors below).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the UTF-8 bytes once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token, which can be sent as `page_token` to retrieve the next page.
   * If this field is omitted, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < tables_.size(); i++) {
      output.writeMessage(1, tables_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < tables_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tables_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse other =
        (com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) obj;
    if (!getTablesList().equals(other.getTablesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getTablesCount() > 0) {
      hash = (37 * hash) + TABLES_FIELD_NUMBER;
      hash = (53 * hash) + getTablesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response message for the ListTables method.
   * </pre>
   *
   * Protobuf type {@code google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse)
      com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.class,
              com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.Builder.class);
    }
    // Construct using com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (tablesBuilder_ == null) {
        tables_ = java.util.Collections.emptyList();
      } else {
        tables_ = null;
        tablesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.MetastoreProto
          .internal_static_google_cloud_bigquery_biglake_v1alpha1_ListTablesResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse
        getDefaultInstanceForType() {
      return com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse build() {
      com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse buildPartial() {
      com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result =
          new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated `tables` field into the result, freezing the list
    // (unmodifiable) when it was built without a nested field builder.
    private void buildPartialRepeatedFields(
        com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result) {
      if (tablesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          tables_ = java.util.Collections.unmodifiableList(tables_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.tables_ = tables_;
      } else {
        result.tables_ = tablesBuilder_.build();
      }
    }
    private void buildPartial0(
        com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) {
        return mergeFrom((com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse other) {
      if (other
          == com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse.getDefaultInstance())
        return this;
      if (tablesBuilder_ == null) {
        if (!other.tables_.isEmpty()) {
          if (tables_.isEmpty()) {
            // Our list is empty: share the other's (immutable) list directly.
            tables_ = other.tables_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTablesIsMutable();
            tables_.addAll(other.tables_);
          }
          onChanged();
        }
      } else {
        if (!other.tables_.isEmpty()) {
          if (tablesBuilder_.isEmpty()) {
            tablesBuilder_.dispose();
            tablesBuilder_ = null;
            tables_ = other.tables_;
            bitField0_ = (bitField0_ & ~0x00000001);
            tablesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getTablesFieldBuilder()
                    : null;
          } else {
            tablesBuilder_.addAllMessages(other.tables_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.bigquery.biglake.v1alpha1.Table m =
                    input.readMessage(
                        com.google.cloud.bigquery.biglake.v1alpha1.Table.parser(),
                        extensionRegistry);
                if (tablesBuilder_ == null) {
                  ensureTablesIsMutable();
                  tables_.add(m);
                } else {
                  tablesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Field-presence bits: 0x1 = tables list is mutable/owned, 0x2 = nextPageToken set.
    private int bitField0_;
    private java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Table> tables_ =
        java.util.Collections.emptyList();
    private void ensureTablesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        tables_ =
            new java.util.ArrayList<com.google.cloud.bigquery.biglake.v1alpha1.Table>(tables_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Table,
            com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>
        tablesBuilder_;
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Table> getTablesList() {
      if (tablesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(tables_);
      } else {
        return tablesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public int getTablesCount() {
      if (tablesBuilder_ == null) {
        return tables_.size();
      } else {
        return tablesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Table getTables(int index) {
      if (tablesBuilder_ == null) {
        return tables_.get(index);
      } else {
        return tablesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder setTables(int index, com.google.cloud.bigquery.biglake.v1alpha1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.set(index, value);
        onChanged();
      } else {
        tablesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder setTables(
        int index, com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.set(index, builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder addTables(com.google.cloud.bigquery.biglake.v1alpha1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.add(value);
        onChanged();
      } else {
        tablesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder addTables(int index, com.google.cloud.bigquery.biglake.v1alpha1.Table value) {
      if (tablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureTablesIsMutable();
        tables_.add(index, value);
        onChanged();
      } else {
        tablesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder addTables(
        com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.add(builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder addTables(
        int index, com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder builderForValue) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.add(index, builderForValue.build());
        onChanged();
      } else {
        tablesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder addAllTables(
        java.lang.Iterable<? extends com.google.cloud.bigquery.biglake.v1alpha1.Table> values) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tables_);
        onChanged();
      } else {
        tablesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder clearTables() {
      if (tablesBuilder_ == null) {
        tables_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        tablesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public Builder removeTables(int index) {
      if (tablesBuilder_ == null) {
        ensureTablesIsMutable();
        tables_.remove(index);
        onChanged();
      } else {
        tablesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder getTablesBuilder(int index) {
      return getTablesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder getTablesOrBuilder(int index) {
      if (tablesBuilder_ == null) {
        return tables_.get(index);
      } else {
        return tablesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>
        getTablesOrBuilderList() {
      if (tablesBuilder_ != null) {
        return tablesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(tables_);
      }
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder addTablesBuilder() {
      return getTablesFieldBuilder()
          .addBuilder(com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder addTablesBuilder(int index) {
      return getTablesFieldBuilder()
          .addBuilder(index, com.google.cloud.bigquery.biglake.v1alpha1.Table.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The tables from the specified database.
     * </pre>
     *
     * <code>repeated .google.cloud.bigquery.biglake.v1alpha1.Table tables = 1;</code>
     */
    public java.util.List<com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder>
        getTablesBuilderList() {
      return getTablesFieldBuilder().getBuilderList();
    }
    // Lazily creates the nested field builder; ownership of `tables_` moves into it.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1alpha1.Table,
            com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder,
            com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>
        getTablesFieldBuilder() {
      if (tablesBuilder_ == null) {
        tablesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.bigquery.biglake.v1alpha1.Table,
                com.google.cloud.bigquery.biglake.v1alpha1.Table.Builder,
                com.google.cloud.bigquery.biglake.v1alpha1.TableOrBuilder>(
                tables_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        tables_ = null;
      }
      return tablesBuilder_;
    }
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token, which can be sent as `page_token` to retrieve the next page.
     * If this field is omitted, there are no subsequent pages.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse)
  private static final com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse();
  }
  public static com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; attaches the partially built message to any parse error.
  private static final com.google.protobuf.Parser<ListTablesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListTablesResponse>() {
        @java.lang.Override
        public ListTablesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListTablesResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListTablesResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.bigquery.biglake.v1alpha1.ListTablesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/lfp/v1/lfpstore.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.lfp.v1;
/**
*
*
* <pre>
* Response message for the ListLfpStores method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.lfp.v1.ListLfpStoresResponse}
*/
public final class ListLfpStoresResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.lfp.v1.ListLfpStoresResponse)
ListLfpStoresResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListLfpStoresResponse.newBuilder() to construct.
  // Builder-based constructor used by Builder.build()/buildPartial().
  private ListLfpStoresResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default construction: empty repeated field, empty page token.
  private ListLfpStoresResponse() {
    lfpStores_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  /** Runtime hook used by the protobuf library to allocate fresh instances. */
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListLfpStoresResponse();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.lfp.v1.LfpStoreProto
.internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.lfp.v1.LfpStoreProto
.internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse.class,
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse.Builder.class);
}
public static final int LFP_STORES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.shopping.merchant.lfp.v1.LfpStore> lfpStores_;
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.shopping.merchant.lfp.v1.LfpStore> getLfpStoresList() {
return lfpStores_;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.shopping.merchant.lfp.v1.LfpStoreOrBuilder>
getLfpStoresOrBuilderList() {
return lfpStores_;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
@java.lang.Override
public int getLfpStoresCount() {
return lfpStores_.size();
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.lfp.v1.LfpStore getLfpStores(int index) {
return lfpStores_.get(index);
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
@java.lang.Override
public com.google.shopping.merchant.lfp.v1.LfpStoreOrBuilder getLfpStoresOrBuilder(int index) {
return lfpStores_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `pageToken` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `pageToken` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < lfpStores_.size(); i++) {
output.writeMessage(1, lfpStores_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < lfpStores_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, lfpStores_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse)) {
return super.equals(obj);
}
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse other =
(com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse) obj;
if (!getLfpStoresList().equals(other.getLfpStoresList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getLfpStoresCount() > 0) {
hash = (37 * hash) + LFP_STORES_FIELD_NUMBER;
hash = (53 * hash) + getLfpStoresList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the ListLfpStores method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.lfp.v1.ListLfpStoresResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.lfp.v1.ListLfpStoresResponse)
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.lfp.v1.LfpStoreProto
.internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.lfp.v1.LfpStoreProto
.internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse.class,
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse.Builder.class);
}
// Construct using com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (lfpStoresBuilder_ == null) {
lfpStores_ = java.util.Collections.emptyList();
} else {
lfpStores_ = null;
lfpStoresBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.lfp.v1.LfpStoreProto
.internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresResponse_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse getDefaultInstanceForType() {
return com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse build() {
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse buildPartial() {
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse result =
new com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse result) {
if (lfpStoresBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
lfpStores_ = java.util.Collections.unmodifiableList(lfpStores_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.lfpStores_ = lfpStores_;
} else {
result.lfpStores_ = lfpStoresBuilder_.build();
}
}
private void buildPartial0(com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse) {
return mergeFrom((com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse other) {
if (other == com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse.getDefaultInstance())
return this;
if (lfpStoresBuilder_ == null) {
if (!other.lfpStores_.isEmpty()) {
if (lfpStores_.isEmpty()) {
lfpStores_ = other.lfpStores_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureLfpStoresIsMutable();
lfpStores_.addAll(other.lfpStores_);
}
onChanged();
}
} else {
if (!other.lfpStores_.isEmpty()) {
if (lfpStoresBuilder_.isEmpty()) {
lfpStoresBuilder_.dispose();
lfpStoresBuilder_ = null;
lfpStores_ = other.lfpStores_;
bitField0_ = (bitField0_ & ~0x00000001);
lfpStoresBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getLfpStoresFieldBuilder()
: null;
} else {
lfpStoresBuilder_.addAllMessages(other.lfpStores_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.shopping.merchant.lfp.v1.LfpStore m =
input.readMessage(
com.google.shopping.merchant.lfp.v1.LfpStore.parser(), extensionRegistry);
if (lfpStoresBuilder_ == null) {
ensureLfpStoresIsMutable();
lfpStores_.add(m);
} else {
lfpStoresBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.shopping.merchant.lfp.v1.LfpStore> lfpStores_ =
java.util.Collections.emptyList();
private void ensureLfpStoresIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
lfpStores_ =
new java.util.ArrayList<com.google.shopping.merchant.lfp.v1.LfpStore>(lfpStores_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.lfp.v1.LfpStore,
com.google.shopping.merchant.lfp.v1.LfpStore.Builder,
com.google.shopping.merchant.lfp.v1.LfpStoreOrBuilder>
lfpStoresBuilder_;
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.lfp.v1.LfpStore> getLfpStoresList() {
if (lfpStoresBuilder_ == null) {
return java.util.Collections.unmodifiableList(lfpStores_);
} else {
return lfpStoresBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public int getLfpStoresCount() {
if (lfpStoresBuilder_ == null) {
return lfpStores_.size();
} else {
return lfpStoresBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public com.google.shopping.merchant.lfp.v1.LfpStore getLfpStores(int index) {
if (lfpStoresBuilder_ == null) {
return lfpStores_.get(index);
} else {
return lfpStoresBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder setLfpStores(int index, com.google.shopping.merchant.lfp.v1.LfpStore value) {
if (lfpStoresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLfpStoresIsMutable();
lfpStores_.set(index, value);
onChanged();
} else {
lfpStoresBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder setLfpStores(
int index, com.google.shopping.merchant.lfp.v1.LfpStore.Builder builderForValue) {
if (lfpStoresBuilder_ == null) {
ensureLfpStoresIsMutable();
lfpStores_.set(index, builderForValue.build());
onChanged();
} else {
lfpStoresBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder addLfpStores(com.google.shopping.merchant.lfp.v1.LfpStore value) {
if (lfpStoresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLfpStoresIsMutable();
lfpStores_.add(value);
onChanged();
} else {
lfpStoresBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder addLfpStores(int index, com.google.shopping.merchant.lfp.v1.LfpStore value) {
if (lfpStoresBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureLfpStoresIsMutable();
lfpStores_.add(index, value);
onChanged();
} else {
lfpStoresBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder addLfpStores(
com.google.shopping.merchant.lfp.v1.LfpStore.Builder builderForValue) {
if (lfpStoresBuilder_ == null) {
ensureLfpStoresIsMutable();
lfpStores_.add(builderForValue.build());
onChanged();
} else {
lfpStoresBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder addLfpStores(
int index, com.google.shopping.merchant.lfp.v1.LfpStore.Builder builderForValue) {
if (lfpStoresBuilder_ == null) {
ensureLfpStoresIsMutable();
lfpStores_.add(index, builderForValue.build());
onChanged();
} else {
lfpStoresBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder addAllLfpStores(
java.lang.Iterable<? extends com.google.shopping.merchant.lfp.v1.LfpStore> values) {
if (lfpStoresBuilder_ == null) {
ensureLfpStoresIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, lfpStores_);
onChanged();
} else {
lfpStoresBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder clearLfpStores() {
if (lfpStoresBuilder_ == null) {
lfpStores_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
lfpStoresBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public Builder removeLfpStores(int index) {
if (lfpStoresBuilder_ == null) {
ensureLfpStoresIsMutable();
lfpStores_.remove(index);
onChanged();
} else {
lfpStoresBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public com.google.shopping.merchant.lfp.v1.LfpStore.Builder getLfpStoresBuilder(int index) {
return getLfpStoresFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public com.google.shopping.merchant.lfp.v1.LfpStoreOrBuilder getLfpStoresOrBuilder(int index) {
if (lfpStoresBuilder_ == null) {
return lfpStores_.get(index);
} else {
return lfpStoresBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public java.util.List<? extends com.google.shopping.merchant.lfp.v1.LfpStoreOrBuilder>
getLfpStoresOrBuilderList() {
if (lfpStoresBuilder_ != null) {
return lfpStoresBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(lfpStores_);
}
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public com.google.shopping.merchant.lfp.v1.LfpStore.Builder addLfpStoresBuilder() {
return getLfpStoresFieldBuilder()
.addBuilder(com.google.shopping.merchant.lfp.v1.LfpStore.getDefaultInstance());
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public com.google.shopping.merchant.lfp.v1.LfpStore.Builder addLfpStoresBuilder(int index) {
return getLfpStoresFieldBuilder()
.addBuilder(index, com.google.shopping.merchant.lfp.v1.LfpStore.getDefaultInstance());
}
/**
*
*
* <pre>
* The stores from the specified merchant.
* </pre>
*
* <code>repeated .google.shopping.merchant.lfp.v1.LfpStore lfp_stores = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.lfp.v1.LfpStore.Builder>
getLfpStoresBuilderList() {
return getLfpStoresFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.lfp.v1.LfpStore,
com.google.shopping.merchant.lfp.v1.LfpStore.Builder,
com.google.shopping.merchant.lfp.v1.LfpStoreOrBuilder>
getLfpStoresFieldBuilder() {
if (lfpStoresBuilder_ == null) {
lfpStoresBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.lfp.v1.LfpStore,
com.google.shopping.merchant.lfp.v1.LfpStore.Builder,
com.google.shopping.merchant.lfp.v1.LfpStoreOrBuilder>(
lfpStores_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
lfpStores_ = null;
}
return lfpStoresBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `pageToken` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `pageToken` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `pageToken` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `pageToken` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `pageToken` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.lfp.v1.ListLfpStoresResponse)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.lfp.v1.ListLfpStoresResponse)
  // NOTE(review): protoc-generated code — regenerate from the .proto instead of hand-editing.
  // Shared immutable default instance returned by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse();
  }
  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that builds a partial message; on failure it attaches the partially-built
  // message to the thrown InvalidProtocolBufferException (standard protobuf contract).
  private static final com.google.protobuf.Parser<ListLfpStoresResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListLfpStoresResponse>() {
        @java.lang.Override
        public ListLfpStoresResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O failures are wrapped as protobuf parse failures.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListLfpStoresResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListLfpStoresResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.shopping.merchant.lfp.v1.ListLfpStoresResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
openjdk/jdk8 | 36,862 | jdk/test/java/text/Format/DecimalFormat/FormatMicroBenchmark.java | /*
* Copyright (c) 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 7050528
* @summary Set of micro-benchmarks testing throughput of java.text.DecimalFormat.format()
* @author Olivier Lagneau
* @run main FormatMicroBenchmark
*/
/* This is a set of micro-benchmarks testing throughput of java.text.DecimalFormat.format().
* It never fails.
*
* Usage and arguments:
* - Run with no argument skips the whole benchmark and exits.
* - Run with "-help" as first argument calls the usage() method and exits.
* - Run with "-doit" runs the benchmark with summary details.
* - Run with "-verbose" provides additional details on the run.
*
* Example run :
* java -Xms500m -Xmx500m -XX:NewSize=400m FormatMicroBenchmark -doit -verbose
*
* Running with jtreg:
* The jtreg header "run" tag options+args must be changed to avoid skipping
* the execution. here is an example of run options:
* "main/othervm -Xms500m -Xmx500m -XX:NewSize=400m FormatMicroBenchmark -doit"
*
* Note:
* - Vm options -Xms, -Xmx, -XX:NewSize must be set correctly for
* getting reliable numbers. Otherwise GC activity may corrupt results.
* As of jdk80b48 using "-Xms500m -Xmx500m -XX:NewSize=400m" covers
* all cases.
* - Optionally using "-XX:+printGC" option provides information that
* helps checking any GC activity while benches are run.
*
* Vm Options:
* - Vm options to use (as of jdk80b48):
* fast-path case : -Xms128m -Xmx128m -XX:NewSize=100m
* non fast-path case: -Xms500m -Xmx500m -XX:NewSize=400m
* or use worst case (non fast-path above) with both types of algorithm.
*
* - use -XX:+PrintGC to verify memory consumption of the benchmarks.
* (See "Checking Memory Consumption" below).
*
* Description:
*
* Fast-path algorithm for format(double...) call stack is very different of
* the standard call stack. Where the standard algorithm for formating double
* uses internal class sun.misc.FloatingDecimal and its dtoa(double) method to
* provide digits, fast-path embeds its own algorithm for binary to decimal
* string conversion.
*
* FloatingDecimal always converts completely the passed double to a string.
* Fast-path converts only to the needed digits since it follows constraints
* on both the pattern rule, the DecimalFormat instance properties, and the
* passed double.
*
* Micro benchmarks below measure the throughput for formating double values
* using NumberFormat.format(double) call stack. The standard DecimalFormat
* call stack as well as the fast-path algorithm implementation are sensitive
* to the nature of the passed double values regarding throughput performance.
*
* These benchmarks are useful both for measuring the global performance gain
* of fast-path and to check that any modification done on fast-path algorithm
* does not bring any regression in the performance boost of fast-path.
*
* Note that these benchmarks will provide numbers without any knowledge of
* the implementation of DecimalFormat class. So to check regression any run
 * should be compared to another reference run with a previous JDK, whether or
* not this previous reference JDK contains fast-path implementation.
*
* The eight benchmarks below are dedicated to measure throughput on different
* kinds of double that all fall in the fast-path case (all in Integer range):
*
* - Integer case : used double values are all "integer-like" (ex: -12345.0).
* This is the benchFormatInteger micro-benchmark.
*
* - Fractional case : double values are "fractional" (ex: -0.12345).
* This is the benchFormatFractional micro-benchmark.
*
* - Small integral case : like Integer case but double values are all limited
* in their magnitude, from -500.0 to 500.0 if the number of iterations N is
* set to 500000.
* This is the benchFormatSmallIntegral micro-benchmark.
*
* - Fractional All Nines : doubles values have fractional part that is very
* close to "999" (decimal pattern), or "99" (currency pattern),
* or "0000...".
* This is the benchFormatFractionalAllNines micro-benchmark.
*
* - All Nines : double values are such that both integral and fractional
* part consist only of '9' digits. None of these values are rounded up.
* This is the benchFormatAllNines micro-benchmark.
*
* - Fair simple case : calling J the loop variable and iterating over
* the N number of iterations, used double values are computed as
* d = (double) J + J*seed
* where seed is a very small value that adds a fractional part and adds a
* small number to integral part. Provides fairly distributed double values.
* This is the benchFormatFairSimple micro-benchmark.
*
* - Fair case : this is a combination of small integral case and fair simple
* case. Double values are limited in their magnitude but follow a parabolic
* curve y = x**2 / K, keeping large magnitude only for large values of J.
* The intent is trying to reproduce a distribution of double values as could
* be found in a business application, with most values in either the low
* range or the high range.
* This is the benchFormatFair micro-benchmark.
*
* - Tie cases: values are very close to a tie case (iii...ii.fff5)
* That is the worst situation that can happen for Fast-path algorithm when
* considering throughput.
* This is the benchFormatTie micro-benchmark.
*
* For all of the micro-benchmarks, the throughput load of the eventual
* additional computations inside the loop is calculated prior to running the
* benchmark, and provided in the output. That may be useful since this load
* may vary for each architecture or machine configuration.
*
* The "-verbose" flag, when set, provides the throughput load numbers, the
* time spent for each run of a benchmark, as well as an estimation of the
* memory consumed by the runs. Beware of incremental GCs, see "Checking
* Memory Consumption" section below. Every run should be done with correct
* ms, mx, and NewSize vm options to get fully reliable numbers.
*
* The output provides the mean time needed for a benchmark after the server
* jit compiler has done its optimization work if any. Thus only the last but
* first three runs are taken into account in the time measurement (server jit
* compiler shows to have done full optimization in most cases after the
* second run, given a base number of iterations set to 500000).
*
* The program cleans up memory (stabilizeMemory() method) between each run of
* the benchmarks to make sure that no garbage collection activity happens in
* measurements. However that does not preclude incremental GCs activity that
* may happen during the micro-benchmark if -Xms, -Xmx, and NewSize options
* have not been tuned and set correctly.
*
* Checking Memory Consumption:
*
 * For getting confidence in the throughput numbers, there must not be any
* GC activity during the benchmark runs. That means that specific VM options
* related to memory must be tuned for any given implementation of the JDK.
*
* Running with "-verbose" arguments will provide clues of the memory consumed
* but is not enough, since any unexpected incremental GC may lower
* artificially the estimation of the memory consumption.
*
* Options to set are -Xms, -Xmx, -XX:NewSize, plus -XX:+PrintGC to evaluate
* correctly the values of these options. When running "-verbose", varying
* numbers reported for memory consumption may indicate bad choices for these
* options.
*
 * For jdk80b25, fast-path shows a consumption of ~60Mbs for 500000 iterations
* while a jdk without fast-path will consume ~260Mbs for each benchmark run.
* Indeed these values will vary depending on the jdk used.
*
* Correct option settings found jdk80b48 were :
* fast-path : -Xms128m -Xmx128m -XX:NewSize=100m
* non fast-path : -Xms500m -Xmx500m -XX:NewSize=400m
* Greater values can be provided safely but not smaller ones.
* ----------------------------------------------------------------------
*/
import java.util.*;
import java.text.NumberFormat;
import java.text.DecimalFormat;
public class FormatMicroBenchmark {

    /** The number of times each bench method will be run (must be at least 4). */
    private static final int NB_RUNS = 20;

    /** Every bench method below iterates over [-MAX_RANGE, +MAX_RANGE] integer values. */
    private static final int MAX_RANGE = 500000;

    /** Flag for more details on each bench run (default is no). */
    private static boolean Verbose = false;

    /** Should we really execute the benches? (no by default). */
    private static boolean DoIt = false;

    /** Prints out a message describing how to run the program. */
    private static void usage() {
        System.out.println(
            "This is a set of micro-benchmarks testing throughput of " +
            "java.text.DecimalFormat.format(). It never fails.\n\n" +
            "Usage and arguments:\n" +
            " - Run with no argument skips the whole benchmark and exits.\n" +
            " - Run with \"-help\" as first argument prints this message and exits.\n" +
            " - Run with \"-doit\" runs the benchmark with summary details.\n" +
            " - Run with \"-verbose\" provides additional details on the run.\n\n" +
            "Example run :\n" +
            " java -Xms500m -Xmx500m -XX:NewSize=400m FormatMicroBenchmark -doit -verbose\n\n" +
            "Note: \n" +
            " - Vm options -Xms, -Xmx, -XX:NewSize must be set correctly for \n" +
            " getting reliable numbers. Otherwise GC activity may corrupt results.\n" +
            " As of jdk80b48 using \"-Xms500m -Xmx500m -XX:NewSize=400m\" covers \n" +
            " all cases.\n" +
            " - Optionally using \"-XX:+printGC\" option provides information that \n" +
            " helps checking any GC activity while benches are run.\n\n" +
            "Look at the heading comments and description in source code for " +
            "detailed information.\n");
    }

    /* We will call stabilizeMemory before each call of benchFormat***().
     * This in turn tries to clean up as much memory as possible.
     * As a safe bound we limit the number of System.gc() calls to 10,
     * but most of the time two calls to System.gc() will be enough.
     * If memory reporting is asked for, the method reports the difference
     * of free memory between entering and leaving the method.
     *
     * @param reportConsumedMemory if true (and Verbose is set), prints the
     *        memory consumed since entering the method.
     * @return the amount of free memory once stabilized.
     */
    private static long stabilizeMemory(boolean reportConsumedMemory) {
        final long oneMegabyte = 1024L * 1024L;
        long refMemory = 0;
        long initialMemoryLeft = Runtime.getRuntime().freeMemory();
        long currMemoryLeft = initialMemoryLeft;
        int nbGCCalls = 0;
        // Loop until two consecutive free-memory readings differ by less than
        // one megabyte, or the bounded number of GC calls is exhausted.
        do {
            nbGCCalls++;
            refMemory = currMemoryLeft;
            System.gc();
            currMemoryLeft = Runtime.getRuntime().freeMemory();
        } while ((Math.abs(currMemoryLeft - refMemory) > oneMegabyte) &&
                 (nbGCCalls < 10));

        if (Verbose && reportConsumedMemory)
            System.out.println("Memory consumed by previous run : " +
                (currMemoryLeft - initialMemoryLeft)/oneMegabyte + "Mbs.");

        return currMemoryLeft;
    }

    /* NOTE: each bench method is paired with a *ThroughputLoad() method that
     * reproduces, by design as duplicated code, the exact arithmetic done in
     * the bench loop minus the format() call, plus a calculate*ThroughputLoad()
     * driver that measures that load. The duplication is intentional: each
     * load must be inlined separately so the JIT optimizes it exactly as it
     * optimizes the corresponding bench loop.
     */

    // ---------- Integer only based bench --------------------
    private static final String INTEGER_BENCH = "benchFormatInteger";

    /** Formats 2*MAX_RANGE+1 "integer-like" doubles (ex: -12345.0). */
    private static String benchFormatInteger(NumberFormat nf) {
        String str = "";
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++)
            str = nf.format((double) j);
        return str;
    }

    /** Reproduces the throughput load added in benchFormatInteger. */
    static double integerThroughputLoad() {
        double d = 0.0d;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            d = (double) j;
        }
        return d;
    }

    /** Runs integerThroughputLoad and calculates its mean load. */
    static void calculateIntegerThroughputLoad() {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;   // sink that keeps the load from being optimized away

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = integerThroughputLoad();
            long estimatedTime = System.nanoTime() - startTime;
            // Skip the first 3 runs so the JIT has finished compiling.
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + INTEGER_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    // ---------- Fractional only based bench --------------------
    private static final String FRACTIONAL_BENCH = "benchFormatFractional";

    /** Formats "fractional" doubles (ex: -0.12345). */
    private static String benchFormatFractional(NumberFormat nf) {
        String str = "";
        double floatingN = 1.0d / (double) MAX_RANGE;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++)
            str = nf.format(floatingN * (double) j);
        return str;
    }

    /** Reproduces the throughput load added in benchFormatFractional. */
    static double fractionalThroughputLoad() {
        double d = 0.0d;
        double floatingN = 1.0d / (double) MAX_RANGE;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            d = floatingN * (double) j;
        }
        return d;
    }

    /** Runs fractionalThroughputLoad and calculates its mean load. */
    static void calculateFractionalThroughputLoad() {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = fractionalThroughputLoad();
            long estimatedTime = System.nanoTime() - startTime;
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + FRACTIONAL_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    // ---------- A Small Integral bench --------------------
    // that limits the magnitude of tested double values.
    private static final String SMALL_INTEGRAL_BENCH = "benchFormatSmallIntegral";

    /** Formats doubles limited in magnitude to [-MAX_RANGE/1000, +MAX_RANGE/1000]. */
    private static String benchFormatSmallIntegral(NumberFormat nf) {
        String str = "";
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++)
            str = nf.format(((double) j) / 1000.0d);
        return str;
    }

    /** Reproduces the throughput load added in benchFormatSmallIntegral. */
    static double smallIntegralThroughputLoad() {
        double d = 0.0d;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            d = (double) j / 1000.0d;
        }
        return d;
    }

    /** Runs smallIntegralThroughputLoad and calculates its mean load. */
    static void calculateSmallIntegralThroughputLoad() {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = smallIntegralThroughputLoad();
            long estimatedTime = System.nanoTime() - startTime;
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + SMALL_INTEGRAL_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    // ---------- A fair and simple bench --------------------
    private static final String FAIR_SIMPLE_BENCH = "benchFormatFairSimple";

    /**
     * Formats fairly distributed doubles d = j + j*seed, where the seed adds a
     * fractional part tailored to the pattern (2 fraction digits for currency,
     * 3 for decimal).
     */
    private static String benchFormatFairSimple(NumberFormat nf, boolean isCurrency) {
        String str = "";
        double seed = isCurrency ? 0.0010203040506070809 : 0.00010203040506070809;
        double d = (double) -MAX_RANGE;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            d = d + 1.0d + seed;
            str = nf.format(d);
        }
        return str;
    }

    /** Reproduces the throughput load added in benchFormatFairSimple
     *  (decimal seed only; the currency seed load is virtually identical). */
    static double fairSimpleThroughputLoad() {
        double seed = 0.00010203040506070809;
        double d = (double) -MAX_RANGE;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            d = d + 1.0d + seed;
        }
        return d;
    }

    /** Runs fairSimpleThroughputLoad and calculates its mean load. */
    static void calculateFairSimpleThroughputLoad() {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = fairSimpleThroughputLoad();
            long estimatedTime = System.nanoTime() - startTime;
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + FAIR_SIMPLE_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    // ---------- Fractional part is only made of nines bench --------------
    private static final String FRACTIONAL_ALL_NINES_BENCH = "benchFormatFractionalAllNines";

    /**
     * Formats doubles whose fractional part is very close to "999" (decimal
     * pattern) or "99" (currency pattern), alternating two close seeds so the
     * rounded fraction alternates between even and odd last digits.
     */
    private static String benchFormatFractionalAllNines(NumberFormat nf, boolean isCurrency) {
        String str = "";
        double fractionalEven = isCurrency ? 0.993000001 : 0.99930000001;
        double fractionalOdd  = isCurrency ? 0.996000001 : 0.99960000001;
        double fractional;
        double d;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            if ((j & 1) == 0)
                fractional = fractionalEven;
            else
                fractional = fractionalOdd;
            if ( j >= 0)
                d = (double ) j + fractional;
            else d = (double) j - fractional;
            str = nf.format(d);
        }
        return str;
    }

    /** Reproduces the throughput load added in benchFormatFractionalAllNines. */
    static double fractionalAllNinesThroughputLoad() {
        double fractionalEven = 0.99930000001;
        double fractionalOdd  = 0.99960000001;
        double fractional;
        double d = 0.0d;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            if ((j & 1) == 0)
                fractional = fractionalEven;
            else fractional = fractionalOdd;
            if ( j >= 0)
                d = (double ) j + fractional;
            else d = (double) j - fractional;
        }
        return d;
    }

    /** Runs fractionalAllNinesThroughputLoad and calculates its mean load. */
    static void calculateFractionalAllNinesThroughputLoad() {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = fractionalAllNinesThroughputLoad();
            long estimatedTime = System.nanoTime() - startTime;
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + FRACTIONAL_ALL_NINES_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    // ---------- Number is only made of nines bench --------------
    private static final String ALL_NINES_BENCH = "benchFormatAllNines";

    /**
     * Formats doubles whose integral and fractional parts consist only of '9'
     * digits once rounded. None of these values are rounded up.
     */
    private static String benchFormatAllNines(NumberFormat nf, boolean isCurrency) {
        String str = "";
        double[] decimalAllNines =
            {9.9993, 99.9993, 999.9993, 9999.9993, 99999.9993,
             999999.9993, 9999999.9993, 99999999.9993, 999999999.9993};
        double[] currencyAllNines =
            {9.993, 99.993, 999.993, 9999.993, 99999.993,
             999999.993, 9999999.993, 99999999.993, 999999999.993};
        double[] valuesArray = (isCurrency) ? currencyAllNines : decimalAllNines;
        double seed = 1.0 / (double) MAX_RANGE;
        double d;
        int id;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            id = (j >= 0) ? j % 9 : -j % 9;
            if ((j & 1) == 0)
                d = valuesArray[id] + id * seed;
            else
                d = valuesArray[id] - id * seed;
            str = nf.format(d);
        }
        return str;
    }

    /** Reproduces the throughput load added in benchFormatAllNines. */
    static double allNinesThroughputLoad() {
        double[] decimalAllNines =
            {9.9993, 99.9993, 999.9993, 9999.9993, 99999.9993,
             999999.9993, 9999999.9993, 99999999.9993, 999999999.9993};
        double[] valuesArray = decimalAllNines;
        double seed = 1.0 / (double) MAX_RANGE;
        double d = 0.0d;
        int id;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            id = (j >= 0) ? j % 9 : -j % 9;
            if ((j & 1) == 0)
                d = valuesArray[id] + id * seed;
            else
                d = valuesArray[id] - id * seed;
        }
        return d;
    }

    /** Runs allNinesThroughputLoad and calculates its mean load. */
    static void calculateAllNinesThroughputLoad() {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = allNinesThroughputLoad();
            long estimatedTime = System.nanoTime() - startTime;
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + ALL_NINES_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    // --- A fair bench trying (hopefully) to reproduce business applications ---
    /* benchFormatFair uses the following formula :
     *  y = F(x) = sign(x) * x**2 * ((1000/MAX_RANGE)**2).
     *
     * which converts in the loop as (if j is the loop index) :
     *  x = double(j)
     *  k = 1000.0d * double(MAX_RANGE)
     *  y = sign(j) * x**2 * k**2
     *
     * This is a flattened parabolic curve where only the j values
     * in [-1000, 1000] will provide y results in [-1, +1] interval,
     * and for abs(j) >= 1000 the result y will be greater than 1.
     *
     * The difference with benchFormatSmallIntegral is that since y results
     * follow a parabolic curve the magnitude of y grows much more rapidly
     * and closer to j values when abs(j) >= 1000:
     *  - for |j| < 1000, SmallIntegral(j) < 1.0 and fair(j) < 1.0
     *  - for j in [1000, 10000[
     *     SmallIntegral(j) is in [1, 10[
     *     Fair(j) is in [4, 400[
     *  - for j in [10000,100000[
     *     SmallIntegral(j) is in [10, 100[
     *     Fair(j) is in [400,40000[
     *  - for j in [100000,1000000[
     *     SmallIntegral(j) is in [100, 1000[
     *     Fair(j) is in [40000, 4000000[
     *
     * Since double values for j less than 100000 provide only 4 digits in the
     * integral, values greater than 250000 provide at least 6 digits, and 500000
     * computes to 1000000, the distribution is roughly half with less than 5
     * digits and half with at least 6 digits in the integral part.
     *
     * Compared to FairSimple bench, this represents an application where 20% of
     * the double values to format are less than 40000.0 absolute value.
     *
     * Fair(j) is close to the magnitude of j when j > 100000 and is hopefully
     * more representative of what may be found in general in business apps.
     * (assumption : there will be mainly either small or large values, and
     *  less values in middle range).
     *
     * We could get even more precise distribution of values using formula :
     *  y = sign(x) * abs(x)**n * ((1000 / MAX_RANGE)**n) where n > 2,
     * or even well-known statistics function to fine target such distribution,
     * but we have considered that the throughput load for calculating y would
     * then be too high. We thus restrain the use of a power of 2 formula.
     */
    private static final String FAIR_BENCH = "benchFormatFair";

    /** Formats doubles distributed on the flattened parabola described above. */
    private static String benchFormatFair(NumberFormat nf) {
        String str = "";
        double k = 1000.0d / (double) MAX_RANGE;
        k *= k;
        double d;
        double absj;
        double jPowerOf2;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            absj = (double) j;
            jPowerOf2 = absj * absj;
            d = k * jPowerOf2;
            if (j < 0) d = -d;
            str = nf.format(d);
        }
        return str;
    }

    /** This is the exact throughput load added in benchFormatFair. */
    static double fairThroughputLoad() {
        double k = 1000.0d / (double) MAX_RANGE;
        k *= k;
        double d = 0.0d;
        double absj;
        double jPowerOf2;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            absj = (double) j;
            jPowerOf2 = absj * absj;
            d = k * jPowerOf2;
            if (j < 0) d = -d;
        }
        return d;
    }

    /** Runs fairThroughputLoad and calculates its mean load. */
    static void calculateFairThroughputLoad() {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = fairThroughputLoad();
            long estimatedTime = System.nanoTime() - startTime;
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + FAIR_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    // ---------- All double values are very close to a tie --------------------
    // i.e. like 123.1235 (for decimal case) or 123.125 (for currency case).
    // This is the worst throughput situation for the fast-path algorithm.
    private static final String TIE_BENCH = "benchFormatTie";

    /** Formats doubles that sit exactly on a half-even rounding tie. */
    private static String benchFormatTie(NumberFormat nf, boolean isCurrency) {
        double d;
        String str = "";
        double fractionalScaling  = (isCurrency) ? 1000.0d : 10000.0d;
        int fixedFractionalPart   = (isCurrency) ? 125 : 1235;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            d = (((double) j * fractionalScaling) +
                 (double) fixedFractionalPart) / fractionalScaling;
            str = nf.format(d);
        }
        return str;
    }

    /** This is the exact throughput load added in benchFormatTie. */
    static double tieThroughputLoad(boolean isCurrency) {
        double d = 0.0d;
        double fractionalScaling  = (isCurrency) ? 1000.0d : 10000.0d;
        int fixedFractionalPart   = (isCurrency) ? 125 : 1235;
        for (int j = - MAX_RANGE; j <= MAX_RANGE; j++) {
            d = (((double) j * fractionalScaling) +
                 (double) fixedFractionalPart) / fractionalScaling;
        }
        return d;
    }

    /** Runs tieThroughputLoad and calculates its mean load. */
    static void calculateTieThroughputLoad(boolean isCurrency) {
        int nbRuns = NB_RUNS;
        long elapsedTime = 0;
        double foo;

        for (int i = 1; i <= nbRuns; i++) {
            long startTime = System.nanoTime();
            foo = tieThroughputLoad(isCurrency);
            long estimatedTime = System.nanoTime() - startTime;
            if (i > 3) elapsedTime += estimatedTime / 1000;
        }

        if (Verbose)
            System.out.println(
                "calculated throughput load for " + TIE_BENCH +
                " bench is = " + (elapsedTime / (nbRuns - 3)) + " microseconds");
    }

    /**
     * Prints statistics for the passed time results of benchName:
     * mean execution time and relative standard deviation (in percent).
     */
    static void printPerfResults(long[] times, String benchName) {
        int nbBenches = times.length;
        long totalTimeSpent = 0;
        long meanTimeSpent;
        double variance = 0;
        double standardDeviation = 0;

        // Calculates mean spent time.
        for (long time : times)
            totalTimeSpent += time;
        meanTimeSpent = totalTimeSpent / nbBenches;

        // Calculates relative standard deviation.
        for (long time : times)
            variance += Math.pow(((double) time - (double) meanTimeSpent), 2);
        variance = variance / (double) times.length;
        // FIX: multiply by 100 since the value is reported with a "%" unit below.
        standardDeviation = 100.0d * Math.sqrt(variance) / (double) meanTimeSpent;

        // Print result and statistics for benchName.
        System.out.println(
            "Statistics (starting at 4th bench) for bench " + benchName +
            "\n for last " + nbBenches +
            " runs out of " + NB_RUNS +
            " , each with 2x" + MAX_RANGE + " format(double) calls : " +
            "\n  mean exec time = " + meanTimeSpent + " microseconds" +
            "\n  standard deviation = " + String.format("%.3f", standardDeviation) + "% \n");
    }

    public static void main(String[] args) {

        if (args.length >= 1) {
            // Parse args, just checks expected ones. Ignore others or dups.
            if (args[0].equals("-help")) {
                usage();
                return;
            }
            for (String s : args) {
                if (s.equals("-doit"))
                    DoIt = true;
                else if (s.equals("-verbose"))
                    Verbose = true;
            }
        } else {
            // No arguments, skips the benchmarks and exits.
            System.out.println(
                "Test skipped with success by default. See -help for details.");
            return;
        }

        if (!DoIt) {
            if (Verbose)
                usage();
            System.out.println(
                "Test skipped and considered successful.");
            return;
        }

        System.out.println("Single Threaded micro benchmark evaluating " +
            "the throughput of java.text.DecimalFormat.format() call stack.\n");

        // The returned strings are kept in fooString so the format() results
        // cannot be considered dead code and optimized away.
        String fooString = "";

        // Run benches for decimal instance
        DecimalFormat df = (DecimalFormat) NumberFormat.getInstance(Locale.US);
        System.out.println("Running with a decimal instance of DecimalFormat.");

        calculateIntegerThroughputLoad();
        fooString =
            BenchType.INTEGER_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        calculateFractionalThroughputLoad();
        fooString =
            BenchType.FRACTIONAL_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        calculateSmallIntegralThroughputLoad();
        fooString =
            BenchType.SMALL_INTEGRAL_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        calculateFractionalAllNinesThroughputLoad();
        fooString =
            BenchType.FRACTIONAL_ALL_NINES_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        calculateAllNinesThroughputLoad();
        fooString =
            BenchType.ALL_NINES_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        calculateFairSimpleThroughputLoad();
        fooString =
            BenchType.FAIR_SIMPLE_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        calculateFairThroughputLoad();
        fooString =
            BenchType.FAIR_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        calculateTieThroughputLoad(false);
        fooString =
            BenchType.TIE_BENCH.runBenchAndPrintStatistics(NB_RUNS, df, false);

        // Run benches for currency instance.
        // FIX: the currency-aware benches must be run with isCurrency == true
        // so they use their currency-specific seeds and tie values (2 fraction
        // digits); the original code passed false for all currency runs.
        DecimalFormat cf = (DecimalFormat) NumberFormat.getCurrencyInstance(Locale.US);
        System.out.println("Running with a currency instance of DecimalFormat.");

        calculateIntegerThroughputLoad();
        fooString =
            BenchType.INTEGER_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, false);

        calculateFractionalThroughputLoad();
        fooString =
            BenchType.FRACTIONAL_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, false);

        calculateSmallIntegralThroughputLoad();
        fooString =
            BenchType.SMALL_INTEGRAL_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, false);

        calculateFractionalAllNinesThroughputLoad();
        fooString =
            BenchType.FRACTIONAL_ALL_NINES_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, true);

        calculateAllNinesThroughputLoad();
        fooString =
            BenchType.ALL_NINES_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, true);

        calculateFairSimpleThroughputLoad();
        fooString =
            BenchType.FAIR_SIMPLE_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, true);

        calculateFairThroughputLoad();
        fooString =
            BenchType.FAIR_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, false);

        calculateTieThroughputLoad(true);
        fooString =
            BenchType.TIE_BENCH.runBenchAndPrintStatistics(NB_RUNS, cf, true);
    }

    /** This enum factorises the run/measure/report loop shared by all benches. */
    enum BenchType {

        INTEGER_BENCH("benchFormatInteger"),
        FRACTIONAL_BENCH("benchFormatFractional"),
        SMALL_INTEGRAL_BENCH("benchFormatSmallIntegral"),
        FAIR_SIMPLE_BENCH("benchFormatFairSimple"),
        FRACTIONAL_ALL_NINES_BENCH("benchFormatFractionalAllNines"),
        ALL_NINES_BENCH("benchFormatAllNines"),
        FAIR_BENCH("benchFormatFair"),
        TIE_BENCH("benchFormatTie");

        /** Human-readable name of the bench, used in reports. */
        private final String name;

        BenchType(String name) {
            this.name = name;
        }

        /**
         * Runs the bench nbRuns times against nf, ignoring the first 3 runs in
         * the time measurements (C2 compilation warm-up), then prints the
         * statistics and returns the last formatted string.
         *
         * @param nbRuns     total number of runs (must be at least 4).
         * @param nf         the NumberFormat instance under measurement.
         * @param isCurrency must be true when nf is a currency format, for the
         *                   benches that tailor their values to the pattern.
         * @return the last string formatted by the bench (optimization sink).
         */
        String runBenchAndPrintStatistics(int nbRuns,
                                          NumberFormat nf,
                                          boolean isCurrency) {

            // We eliminate the first 3 runs in the time measurements
            // to let C2 do complete compilation and optimization work.
            long[] elapsedTimes = new long[nbRuns - 3];
            System.out.println("Now running " + nbRuns + " times bench " + name);

            String str = "";
            for (int i = 1; i <= nbRuns; i++) {
                stabilizeMemory(false);
                long startTime = System.nanoTime();
                switch(this) {
                case INTEGER_BENCH :
                    str = benchFormatInteger(nf);
                    break;
                case FRACTIONAL_BENCH :
                    str = benchFormatFractional(nf);
                    break;
                case SMALL_INTEGRAL_BENCH :
                    str = benchFormatSmallIntegral(nf);
                    break;
                case FRACTIONAL_ALL_NINES_BENCH :
                    str = benchFormatFractionalAllNines(nf, isCurrency);
                    break;
                case ALL_NINES_BENCH :
                    str = benchFormatAllNines(nf, isCurrency);
                    break;
                case FAIR_SIMPLE_BENCH :
                    str = benchFormatFairSimple(nf, isCurrency);
                    break;
                case FAIR_BENCH :
                    str = benchFormatFair(nf);
                    break;
                case TIE_BENCH :
                    str = benchFormatTie(nf, isCurrency);
                    break;
                default:
                    // Unreachable: every enum constant is handled above.
                    break;
                }
                long estimatedTime = System.nanoTime() - startTime;

                if (i > 3)
                    elapsedTimes[i-4] = estimatedTime / 1000;

                if (Verbose)
                    System.out.println(
                        "calculated time for " + name +
                        " bench " + i + " is = " +
                        (estimatedTime / 1000) + " microseconds");
                else System.out.print(".");

                stabilizeMemory(true);
            }
            System.out.println(name + " Done.");

            printPerfResults(elapsedTimes, name);

            return str;
        }
    }
}
|
googleapis/google-cloud-java | 36,739 | java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/CreateTaskRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataplex/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataplex.v1;
/**
*
*
* <pre>
* Create task request.
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.CreateTaskRequest}
*/
public final class CreateTaskRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.CreateTaskRequest)
CreateTaskRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateTaskRequest.newBuilder() to construct.
private CreateTaskRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateTaskRequest() {
parent_ = "";
taskId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateTaskRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataplex.v1.ServiceProto
.internal_static_google_cloud_dataplex_v1_CreateTaskRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataplex.v1.ServiceProto
.internal_static_google_cloud_dataplex_v1_CreateTaskRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataplex.v1.CreateTaskRequest.class,
com.google.cloud.dataplex.v1.CreateTaskRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the parent lake:
* `projects/{project_number}/locations/{location_id}/lakes/{lake_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The resource name of the parent lake:
* `projects/{project_number}/locations/{location_id}/lakes/{lake_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TASK_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object taskId_ = "";
/**
*
*
* <pre>
* Required. Task identifier.
* </pre>
*
* <code>string task_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The taskId.
*/
@java.lang.Override
public java.lang.String getTaskId() {
java.lang.Object ref = taskId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
taskId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Task identifier.
* </pre>
*
* <code>string task_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for taskId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTaskIdBytes() {
java.lang.Object ref = taskId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
taskId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TASK_FIELD_NUMBER = 3;
private com.google.cloud.dataplex.v1.Task task_;
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the task field is set.
*/
@java.lang.Override
public boolean hasTask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The task.
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.Task getTask() {
return task_ == null ? com.google.cloud.dataplex.v1.Task.getDefaultInstance() : task_;
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*/
@java.lang.Override
public com.google.cloud.dataplex.v1.TaskOrBuilder getTaskOrBuilder() {
return task_ == null ? com.google.cloud.dataplex.v1.Task.getDefaultInstance() : task_;
}
public static final int VALIDATE_ONLY_FIELD_NUMBER = 4;
private boolean validateOnly_ = false;
/**
*
*
* <pre>
* Optional. Only validate the request, but do not perform mutations.
* The default is false.
* </pre>
*
* <code>bool validate_only = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(taskId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, taskId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getTask());
}
if (validateOnly_ != false) {
output.writeBool(4, validateOnly_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(taskId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, taskId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getTask());
}
if (validateOnly_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, validateOnly_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataplex.v1.CreateTaskRequest)) {
return super.equals(obj);
}
com.google.cloud.dataplex.v1.CreateTaskRequest other =
(com.google.cloud.dataplex.v1.CreateTaskRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getTaskId().equals(other.getTaskId())) return false;
if (hasTask() != other.hasTask()) return false;
if (hasTask()) {
if (!getTask().equals(other.getTask())) return false;
}
if (getValidateOnly() != other.getValidateOnly()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + TASK_ID_FIELD_NUMBER;
hash = (53 * hash) + getTaskId().hashCode();
if (hasTask()) {
hash = (37 * hash) + TASK_FIELD_NUMBER;
hash = (53 * hash) + getTask().hashCode();
}
hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dataplex.v1.CreateTaskRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Create task request.
* </pre>
*
* Protobuf type {@code google.cloud.dataplex.v1.CreateTaskRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.CreateTaskRequest)
com.google.cloud.dataplex.v1.CreateTaskRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataplex.v1.ServiceProto
.internal_static_google_cloud_dataplex_v1_CreateTaskRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataplex.v1.ServiceProto
.internal_static_google_cloud_dataplex_v1_CreateTaskRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataplex.v1.CreateTaskRequest.class,
com.google.cloud.dataplex.v1.CreateTaskRequest.Builder.class);
}
// Construct using com.google.cloud.dataplex.v1.CreateTaskRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getTaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
taskId_ = "";
task_ = null;
if (taskBuilder_ != null) {
taskBuilder_.dispose();
taskBuilder_ = null;
}
validateOnly_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataplex.v1.ServiceProto
.internal_static_google_cloud_dataplex_v1_CreateTaskRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dataplex.v1.CreateTaskRequest getDefaultInstanceForType() {
return com.google.cloud.dataplex.v1.CreateTaskRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataplex.v1.CreateTaskRequest build() {
com.google.cloud.dataplex.v1.CreateTaskRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataplex.v1.CreateTaskRequest buildPartial() {
com.google.cloud.dataplex.v1.CreateTaskRequest result =
new com.google.cloud.dataplex.v1.CreateTaskRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dataplex.v1.CreateTaskRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.taskId_ = taskId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.task_ = taskBuilder_ == null ? task_ : taskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.validateOnly_ = validateOnly_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataplex.v1.CreateTaskRequest) {
return mergeFrom((com.google.cloud.dataplex.v1.CreateTaskRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataplex.v1.CreateTaskRequest other) {
if (other == com.google.cloud.dataplex.v1.CreateTaskRequest.getDefaultInstance()) return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getTaskId().isEmpty()) {
taskId_ = other.taskId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasTask()) {
mergeTask(other.getTask());
}
if (other.getValidateOnly() != false) {
setValidateOnly(other.getValidateOnly());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
taskId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getTaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
case 32:
{
validateOnly_ = input.readBool();
bitField0_ |= 0x00000008;
break;
} // case 32
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the parent lake:
* `projects/{project_number}/locations/{location_id}/lakes/{lake_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the parent lake:
* `projects/{project_number}/locations/{location_id}/lakes/{lake_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the parent lake:
* `projects/{project_number}/locations/{location_id}/lakes/{lake_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the parent lake:
* `projects/{project_number}/locations/{location_id}/lakes/{lake_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the parent lake:
* `projects/{project_number}/locations/{location_id}/lakes/{lake_id}`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object taskId_ = "";
/**
*
*
* <pre>
* Required. Task identifier.
* </pre>
*
* <code>string task_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The taskId.
*/
public java.lang.String getTaskId() {
java.lang.Object ref = taskId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
taskId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Task identifier.
* </pre>
*
* <code>string task_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for taskId.
*/
public com.google.protobuf.ByteString getTaskIdBytes() {
java.lang.Object ref = taskId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
taskId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Task identifier.
* </pre>
*
* <code>string task_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The taskId to set.
* @return This builder for chaining.
*/
public Builder setTaskId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
taskId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Task identifier.
* </pre>
*
* <code>string task_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearTaskId() {
taskId_ = getDefaultInstance().getTaskId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Task identifier.
* </pre>
*
* <code>string task_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for taskId to set.
* @return This builder for chaining.
*/
public Builder setTaskIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
taskId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.dataplex.v1.Task task_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataplex.v1.Task,
com.google.cloud.dataplex.v1.Task.Builder,
com.google.cloud.dataplex.v1.TaskOrBuilder>
taskBuilder_;
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the task field is set.
*/
public boolean hasTask() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The task.
*/
public com.google.cloud.dataplex.v1.Task getTask() {
if (taskBuilder_ == null) {
return task_ == null ? com.google.cloud.dataplex.v1.Task.getDefaultInstance() : task_;
} else {
return taskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTask(com.google.cloud.dataplex.v1.Task value) {
if (taskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
task_ = value;
} else {
taskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTask(com.google.cloud.dataplex.v1.Task.Builder builderForValue) {
if (taskBuilder_ == null) {
task_ = builderForValue.build();
} else {
taskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeTask(com.google.cloud.dataplex.v1.Task value) {
if (taskBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& task_ != null
&& task_ != com.google.cloud.dataplex.v1.Task.getDefaultInstance()) {
getTaskBuilder().mergeFrom(value);
} else {
task_ = value;
}
} else {
taskBuilder_.mergeFrom(value);
}
if (task_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearTask() {
bitField0_ = (bitField0_ & ~0x00000004);
task_ = null;
if (taskBuilder_ != null) {
taskBuilder_.dispose();
taskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataplex.v1.Task.Builder getTaskBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getTaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataplex.v1.TaskOrBuilder getTaskOrBuilder() {
if (taskBuilder_ != null) {
return taskBuilder_.getMessageOrBuilder();
} else {
return task_ == null ? com.google.cloud.dataplex.v1.Task.getDefaultInstance() : task_;
}
}
/**
*
*
* <pre>
* Required. Task resource.
* </pre>
*
* <code>.google.cloud.dataplex.v1.Task task = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataplex.v1.Task,
com.google.cloud.dataplex.v1.Task.Builder,
com.google.cloud.dataplex.v1.TaskOrBuilder>
getTaskFieldBuilder() {
if (taskBuilder_ == null) {
taskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataplex.v1.Task,
com.google.cloud.dataplex.v1.Task.Builder,
com.google.cloud.dataplex.v1.TaskOrBuilder>(
getTask(), getParentForChildren(), isClean());
task_ = null;
}
return taskBuilder_;
}
private boolean validateOnly_;
/**
*
*
* <pre>
* Optional. Only validate the request, but do not perform mutations.
* The default is false.
* </pre>
*
* <code>bool validate_only = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The validateOnly.
*/
@java.lang.Override
public boolean getValidateOnly() {
return validateOnly_;
}
/**
*
*
* <pre>
* Optional. Only validate the request, but do not perform mutations.
* The default is false.
* </pre>
*
* <code>bool validate_only = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The validateOnly to set.
* @return This builder for chaining.
*/
public Builder setValidateOnly(boolean value) {
validateOnly_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Only validate the request, but do not perform mutations.
* The default is false.
* </pre>
*
* <code>bool validate_only = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearValidateOnly() {
bitField0_ = (bitField0_ & ~0x00000008);
validateOnly_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.CreateTaskRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.CreateTaskRequest)
private static final com.google.cloud.dataplex.v1.CreateTaskRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.CreateTaskRequest();
}
public static com.google.cloud.dataplex.v1.CreateTaskRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateTaskRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateTaskRequest>() {
@java.lang.Override
public CreateTaskRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateTaskRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateTaskRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dataplex.v1.CreateTaskRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,761 | java-private-catalog/proto-google-cloud-private-catalog-v1beta1/src/main/java/com/google/cloud/privatecatalog/v1beta1/SearchVersionsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/privatecatalog/v1beta1/private_catalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.privatecatalog.v1beta1;
/**
*
*
* <pre>
* Request message for [PrivateCatalog.SearchVersions][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchVersions].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchVersionsRequest}
*/
public final class SearchVersionsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.privatecatalog.v1beta1.SearchVersionsRequest)
SearchVersionsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchVersionsRequest.newBuilder() to construct.
private SearchVersionsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchVersionsRequest() {
resource_ = "";
query_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchVersionsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.class,
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.Builder.class);
}
public static final int RESOURCE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Required. The name of the resource context. See [SearchCatalogsRequest.resource][google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.resource]
* for details.
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
  @java.lang.Override
  public java.lang.String getResource() {
    java.lang.Object ref = resource_;
    if (ref instanceof java.lang.String) {
      // Already decoded (or set directly as a String) — return the cached value.
      return (java.lang.String) ref;
    } else {
      // Field is still the raw ByteString from parsing; decode it as UTF-8 once
      // and cache the String back into the field for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      resource_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The name of the resource context. See [SearchCatalogsRequest.resource][google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.resource]
* for details.
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int QUERY_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object query_ = "";
/**
*
*
* <pre>
* Required. The query to filter the versions.
*
* The supported queries are:
* * List versions under a product:
* `parent=catalogs/{catalog}/products/{product}`
* * Get a version by name:
* `name=catalogs/{catalog}/products/{product}/versions/{version}`
* </pre>
*
* <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The query.
*/
@java.lang.Override
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The query to filter the versions.
*
* The supported queries are:
* * List versions under a product:
* `parent=catalogs/{catalog}/products/{product}`
* * Get a version by name:
* `name=catalogs/{catalog}/products/{product}/versions/{version}`
* </pre>
*
* <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for query.
*/
@java.lang.Override
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized result: -1 = not yet computed, 1 = initialized, 0 = not initialized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This proto3 message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 serialization: fields at their default value (empty string, 0)
    // are omitted from the wire entirely.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, query_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    // Preserve any fields that were on the wire but unknown to this schema version.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means "not yet computed". Safe because the message is immutable.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirror writeTo(): only non-default fields contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, resource_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(query_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, query_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest other =
        (com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest) obj;
    // Field-by-field comparison of all four declared fields, then unknown fields,
    // so that messages parsed from the same bytes always compare equal.
    if (!getResource().equals(other.getResource())) return false;
    if (!getQuery().equals(other.getQuery())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized; 0 acts as the "not yet computed" sentinel. The mixing constants
    // below guarantee the computed hash is never 0 for practical purposes.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Each field contributes its field number and value, keeping hashCode
    // consistent with equals() above.
    hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
    hash = (53 * hash) + getResource().hashCode();
    hash = (37 * hash) + QUERY_FIELD_NUMBER;
    hash = (53 * hash) + getQuery().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for [PrivateCatalog.SearchVersions][google.cloud.privatecatalog.v1beta1.PrivateCatalog.SearchVersions].
* </pre>
*
* Protobuf type {@code google.cloud.privatecatalog.v1beta1.SearchVersionsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.privatecatalog.v1beta1.SearchVersionsRequest)
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.class,
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.Builder.class);
}
// Construct using com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
resource_ = "";
query_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.privatecatalog.v1beta1.PrivateCatalogProto
.internal_static_google_cloud_privatecatalog_v1beta1_SearchVersionsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest
getDefaultInstanceForType() {
return com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest build() {
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest buildPartial() {
com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest result =
new com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    // Copies only the fields that were explicitly set on this Builder (tracked
    // in bitField0_, one bit per field) into the result message.
    private void buildPartial0(
        com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.resource_ = resource_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.query_ = query_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest) {
return mergeFrom((com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest other) {
if (other
== com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest.getDefaultInstance())
return this;
if (!other.getResource().isEmpty()) {
resource_ = other.resource_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getQuery().isEmpty()) {
query_ = other.query_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
resource_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
query_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Required. The name of the resource context. See [SearchCatalogsRequest.resource][google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.resource]
* for details.
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the resource context. See [SearchCatalogsRequest.resource][google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.resource]
* for details.
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the resource context. See [SearchCatalogsRequest.resource][google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.resource]
* for details.
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The resource to set.
* @return This builder for chaining.
*/
public Builder setResource(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
resource_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the resource context. See [SearchCatalogsRequest.resource][google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.resource]
* for details.
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearResource() {
resource_ = getDefaultInstance().getResource();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the resource context. See [SearchCatalogsRequest.resource][google.cloud.privatecatalog.v1beta1.SearchCatalogsRequest.resource]
* for details.
* </pre>
*
* <code>string resource = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for resource to set.
* @return This builder for chaining.
*/
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
resource_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object query_ = "";
/**
*
*
* <pre>
* Required. The query to filter the versions.
*
* The supported queries are:
* * List versions under a product:
* `parent=catalogs/{catalog}/products/{product}`
* * Get a version by name:
* `name=catalogs/{catalog}/products/{product}/versions/{version}`
* </pre>
*
* <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The query.
*/
public java.lang.String getQuery() {
java.lang.Object ref = query_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
query_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The query to filter the versions.
*
* The supported queries are:
* * List versions under a product:
* `parent=catalogs/{catalog}/products/{product}`
* * Get a version by name:
* `name=catalogs/{catalog}/products/{product}/versions/{version}`
* </pre>
*
* <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for query.
*/
public com.google.protobuf.ByteString getQueryBytes() {
java.lang.Object ref = query_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
query_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The query to filter the versions.
*
* The supported queries are:
* * List versions under a product:
* `parent=catalogs/{catalog}/products/{product}`
* * Get a version by name:
* `name=catalogs/{catalog}/products/{product}/versions/{version}`
* </pre>
*
* <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The query to set.
* @return This builder for chaining.
*/
public Builder setQuery(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query to filter the versions.
*
* The supported queries are:
* * List versions under a product:
* `parent=catalogs/{catalog}/products/{product}`
* * Get a version by name:
* `name=catalogs/{catalog}/products/{product}/versions/{version}`
* </pre>
*
* <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearQuery() {
query_ = getDefaultInstance().getQuery();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The query to filter the versions.
*
* The supported queries are:
* * List versions under a product:
* `parent=catalogs/{catalog}/products/{product}`
* * Get a version by name:
* `name=catalogs/{catalog}/products/{product}/versions/{version}`
* </pre>
*
* <code>string query = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for query to set.
* @return This builder for chaining.
*/
public Builder setQueryBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
query_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of entries that are requested.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* A pagination token returned from a previous call to SearchVersions
* that indicates where this listing should continue from.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.privatecatalog.v1beta1.SearchVersionsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.privatecatalog.v1beta1.SearchVersionsRequest)
private static final com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest();
}
public static com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SearchVersionsRequest> PARSER =
new com.google.protobuf.AbstractParser<SearchVersionsRequest>() {
@java.lang.Override
public SearchVersionsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SearchVersionsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SearchVersionsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.privatecatalog.v1beta1.SearchVersionsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/hadoop | 36,722 | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/AbstractDelegationTokenSecretManager.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.token.delegation;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.crypto.SecretKey;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.statistics.DurationTracker;
import org.apache.hadoop.fs.statistics.DurationTrackerFactory;
import org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding;
import org.apache.hadoop.fs.statistics.impl.IOStatisticsStore;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.metrics2.annotation.Metric;
import org.apache.hadoop.metrics2.annotation.Metrics;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.lib.MetricsRegistry;
import org.apache.hadoop.metrics2.lib.MutableCounterLong;
import org.apache.hadoop.metrics2.lib.MutableRate;
import org.apache.hadoop.metrics2.util.Metrics2Util.NameValuePair;
import org.apache.hadoop.metrics2.util.Metrics2Util.TopN;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.HadoopKerberosName;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Daemon;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.functional.InvocationRaisingIOE;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract
class AbstractDelegationTokenSecretManager<TokenIdent
extends AbstractDelegationTokenIdentifier>
extends SecretManager<TokenIdent> {
private static final Logger LOG = LoggerFactory
.getLogger(AbstractDelegationTokenSecretManager.class);
/**
* Metrics to track token management operations.
*/
private static final DelegationTokenSecretManagerMetrics METRICS
= DelegationTokenSecretManagerMetrics.create();
/**
 * Render a token identifier for log messages. If the identifier's own
 * {@code toString()} throws, fall back to printing just its sequence number
 * so that logging never kills the calling operation.
 */
private String formatTokenId(TokenIdent id) {
  String rendered;
  try {
    rendered = "(" + id + ")";
  } catch (Exception e) {
    LOG.warn("Exception in formatTokenId", e);
    rendered = "( SequenceNumber=" + id.getSequenceNumber() + " )";
  }
  return rendered;
}
/**
 * Cache of currently valid tokens, mapping from DelegationTokenIdentifier
 * to DelegationTokenInformation. Guarded by {@link #apiLock}.
 */
protected Map<TokenIdent, DelegationTokenInformation> currentTokens;
/**
 * Map of token real owners to its token count. This is used to generate
 * metrics of top users by owned tokens. Concurrent map; updated without
 * holding apiLock.
 */
protected final Map<String, Long> tokenOwnerStats = new ConcurrentHashMap<>();
/**
 * Sequence number to create DelegationTokenIdentifier.
 * Guarded by {@link #apiLock}.
 */
protected int delegationTokenSequenceNumber = 0;
/**
 * Access to allKeys is guarded by {@link #apiLock}.
 */
protected final Map<Integer, DelegationKey> allKeys
= new ConcurrentHashMap<>();
/**
 * Access to currentId is guarded by {@link #apiLock}.
 */
protected int currentId = 0;
/**
 * Access to currentKey is guarded by {@link #apiLock}.
 */
private DelegationKey currentKey;
// Immutable timing configuration, all in milliseconds (set in constructor).
private final long keyUpdateInterval;
private final long tokenMaxLifetime;
private final long tokenRemoverScanInterval;
private final long tokenRenewInterval;
/**
 * Whether to store a token's tracking ID in its TokenInformation.
 * Can be overridden by a subclass.
 */
protected boolean storeTokenTrackingId;
// Daemon running ExpiredTokenRemover; created in startThreads().
private Thread tokenRemoverThread;
// volatile: read by the remover thread without holding apiLock.
protected volatile boolean running;
/**
 * If the delegation token update thread holds this lock, it will
 * not get interrupted.
 */
protected Object noInterruptsLock = new Object();
// Fair read/write lock protecting the mutable token/key state above.
private final ReentrantReadWriteLock apiLock = new ReentrantReadWriteLock(true);
/**
 * Create a secret manager
 * @param delegationKeyUpdateInterval the number of milliseconds for rolling
 * new secret keys.
 * @param delegationTokenMaxLifetime the maximum lifetime of the delegation
 * tokens in milliseconds
 * @param delegationTokenRenewInterval how often the tokens must be renewed
 * in milliseconds
 * @param delegationTokenRemoverScanInterval how often the tokens are scanned
 * for expired tokens in milliseconds
 */
public AbstractDelegationTokenSecretManager(long delegationKeyUpdateInterval,
long delegationTokenMaxLifetime, long delegationTokenRenewInterval,
long delegationTokenRemoverScanInterval) {
this.keyUpdateInterval = delegationKeyUpdateInterval;
this.tokenMaxLifetime = delegationTokenMaxLifetime;
this.tokenRenewInterval = delegationTokenRenewInterval;
this.tokenRemoverScanInterval = delegationTokenRemoverScanInterval;
// Tracking ids are disabled by default; subclasses may flip this flag.
this.storeTokenTrackingId = false;
this.currentTokens = new ConcurrentHashMap<>();
}
/**
 * Activate the secret manager: generate the first master key and start the
 * expired-token remover daemon. Should be called exactly once before this
 * object is used; fails if already running.
 * @throws IOException raised on errors performing I/O.
 */
public void startThreads() throws IOException {
Preconditions.checkState(!running);
// Generate the initial master key before marking the manager running.
updateCurrentKey();
this.apiLock.writeLock().lock();
try {
running = true;
tokenRemoverThread = new Daemon(new ExpiredTokenRemover());
tokenRemoverThread.start();
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Reset all data structures and mutable state: key id, sequence number,
 * and both the key and token caches are cleared under the write lock.
 * Note: tokenOwnerStats is NOT cleared here; callers use
 * {@link #syncTokenOwnerStats()} to rebuild it.
 */
public void reset() {
this.apiLock.writeLock().lock();
try {
setCurrentKeyId(0);
allKeys.clear();
setDelegationTokenSeqNum(0);
currentTokens.clear();
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Total count of active delegation tokens.
 *
 * @return currentTokens.size.
 */
public long getCurrentTokensSize() {
// ConcurrentHashMap.size() is safe without the api lock.
return currentTokens.size();
}
/**
 * Interval for tokens to be renewed.
 * @return Renew interval in milliseconds.
 */
protected long getTokenRenewInterval() {
return this.tokenRenewInterval;
}
/**
 * Add a previously used master key to cache (when NN restarts),
 * should be called before activate().
 *
 * @param key delegation key to register in the key cache.
 * @throws IOException if the secret manager is already running.
 */
public void addKey(DelegationKey key) throws IOException {
  if (running) {
    // a safety check
    throw new IOException("Can't add delegation key to a running SecretManager.");
  }
  this.apiLock.writeLock().lock();
  try {
    int keyId = key.getKeyId();
    // keep currentId at the highest key id seen so far
    if (keyId > getCurrentKeyId()) {
      setCurrentKeyId(keyId);
    }
    allKeys.put(keyId, key);
  } finally {
    this.apiLock.writeLock().unlock();
  }
}
/** @return a snapshot array of every master key currently cached. */
public DelegationKey[] getAllKeys() {
  this.apiLock.readLock().lock();
  try {
    Collection<DelegationKey> keys = allKeys.values();
    return keys.toArray(new DelegationKey[0]);
  } finally {
    this.apiLock.readLock().unlock();
  }
}
// The following methods are no-op hooks for persisting/auditing keys and
// tokens. Subclasses that externalize state (the HDFS NameNode edit log,
// the YARN RM state store, ZK-backed managers) override the relevant ones.
// HDFS
protected void logUpdateMasterKey(DelegationKey key) throws IOException {
return;
}
// HDFS
protected void logExpireToken(TokenIdent ident) throws IOException {
return;
}
// RM
protected void storeNewMasterKey(DelegationKey key) throws IOException {
return;
}
// RM
protected void removeStoredMasterKey(DelegationKey key) {
return;
}
// RM
protected void storeNewToken(TokenIdent ident, long renewDate) throws IOException{
return;
}
// RM
protected void removeStoredToken(TokenIdent ident) throws IOException {
}
// RM
protected void updateStoredToken(TokenIdent ident, long renewDate) throws IOException {
return;
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @return the current master-key id (read lock held).
 */
protected int getCurrentKeyId() {
this.apiLock.readLock().lock();
try {
return currentId;
} finally {
this.apiLock.readLock().unlock();
}
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @return the key id after incrementing it (write lock held).
 */
protected int incrementCurrentKeyId() {
this.apiLock.writeLock().lock();
try {
return ++currentId;
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @param keyId new value for the current master-key id.
 */
protected void setCurrentKeyId(int keyId) {
this.apiLock.writeLock().lock();
try {
currentId = keyId;
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @return the current token sequence number (read lock held).
 */
protected int getDelegationTokenSeqNum() {
this.apiLock.readLock().lock();
try {
return delegationTokenSequenceNumber;
} finally {
this.apiLock.readLock().unlock();
}
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @return the sequence number after incrementing it (write lock held).
 */
protected int incrementDelegationTokenSeqNum() {
this.apiLock.writeLock().lock();
try {
return ++delegationTokenSequenceNumber;
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @param seqNum new value for the token sequence number.
 */
protected void setDelegationTokenSeqNum(int seqNum) {
this.apiLock.writeLock().lock();
try {
delegationTokenSequenceNumber = seqNum;
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @param keyId keyId.
 * @return the cached DelegationKey, or null if unknown.
 */
protected DelegationKey getDelegationKey(int keyId) {
return allKeys.get(keyId);
}
/**
 * Cache the key and persist it via {@link #storeNewMasterKey(DelegationKey)}.
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @param key DelegationKey.
 * @throws IOException raised on errors performing I/O.
 */
protected void storeDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
storeNewMasterKey(key);
}
/**
 * Update the in-memory key cache only; no persistence hook is invoked here.
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @param key DelegationKey.
 * @throws IOException raised on errors performing I/O.
 */
protected void updateDelegationKey(DelegationKey key) throws IOException {
allKeys.put(key.getKeyId(), key);
}
/**
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations
 *
 * @param ident ident.
 * @return the cached DelegationTokenInformation, or null if unknown.
 */
protected DelegationTokenInformation getTokenInfo(TokenIdent ident) {
return currentTokens.get(ident);
}
/**
 * Cache the token, update the per-owner stats, and persist it via
 * {@link #storeNewToken}.
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @param ident ident.
 * @param tokenInfo tokenInfo.
 * @throws IOException raised on errors performing I/O.
 */
protected void storeToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
currentTokens.put(ident, tokenInfo);
addTokenForOwnerStats(ident);
storeNewToken(ident, tokenInfo.getRenewDate());
}
/**
 * Replace a cached token's info and persist the new renew date via
 * {@link #updateStoredToken}.
 * For subclasses externalizing the storage, for example Zookeeper
 * based implementations.
 *
 * @param ident ident.
 * @param tokenInfo tokenInfo.
 * @throws IOException raised on errors performing I/O.
 */
protected void updateToken(TokenIdent ident,
DelegationTokenInformation tokenInfo) throws IOException {
currentTokens.put(ident, tokenInfo);
updateStoredToken(ident, tokenInfo.getRenewDate());
}
/**
 * This method is intended to be used for recovering persisted delegation
 * tokens. Tokens that have an unknown <code>DelegationKey</code> are
 * marked as expired and automatically cleaned up.
 * This method must be called before this secret manager is activated (before
 * startThreads() is called)
 * @param identifier identifier read from persistent storage
 * @param renewDate token renew time
 * @throws IOException raised on errors performing I/O.
 */
public void addPersistedDelegationToken(
TokenIdent identifier, long renewDate) throws IOException {
if (running) {
// a safety check
throw new IOException(
"Can't add persisted delegation token to a running SecretManager.");
}
this.apiLock.writeLock().lock();
try {
int keyId = identifier.getMasterKeyId();
DelegationKey dKey = allKeys.get(keyId);
byte[] password = null;
if (dKey == null) {
LOG.warn("No KEY found for persisted identifier, expiring stored token " + formatTokenId(
identifier));
// make sure the token is expired
renewDate = 0L;
} else {
password = createPassword(identifier.getBytes(), dKey.getKey());
}
// Keep the sequence number ahead of every recovered token so new tokens
// never collide with persisted ones.
if (identifier.getSequenceNumber() > getDelegationTokenSeqNum()) {
setDelegationTokenSeqNum(identifier.getSequenceNumber());
}
if (getTokenInfo(identifier) == null) {
currentTokens.put(identifier, new DelegationTokenInformation(renewDate, password,
getTrackingIdIfEnabled(identifier)));
addTokenForOwnerStats(identifier);
} else {
throw new IOException("Same delegation token being added twice: " +
formatTokenId(identifier));
}
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Update the current master key
 * This is called once by startThreads before tokenRemoverThread is created,
 * and only by tokenRemoverThread afterwards.
 */
private void updateCurrentKey() throws IOException {
LOG.info("Updating the current master key for generating delegation tokens");
/* Create a new currentKey with an estimated expiry date. */
int newCurrentId;
newCurrentId = incrementCurrentKeyId();
// Expiry covers one roll interval plus the max token lifetime, so tokens
// minted just before the roll can still be validated against this key.
DelegationKey newKey = new DelegationKey(newCurrentId, System
.currentTimeMillis()
+ keyUpdateInterval + tokenMaxLifetime, generateSecret());
//Log must be invoked outside the lock on 'this'
logUpdateMasterKey(newKey);
this.apiLock.writeLock().lock();
try {
currentKey = newKey;
storeDelegationKey(currentKey);
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Update the current master key for generating delegation tokens
 * It should be called only by tokenRemoverThread.
 * @throws IOException raised on errors performing I/O.
 */
protected void rollMasterKey() throws IOException {
this.apiLock.writeLock().lock();
try {
removeExpiredKeys();
/* set final expiry date for retiring currentKey */
currentKey.setExpiryDate(Time.now() + tokenMaxLifetime);
/*
 * currentKey might have been removed by removeExpiredKeys(), if
 * updateMasterKey() isn't called at expected interval. Add it back to
 * allKeys just in case.
 */
updateDelegationKey(currentKey);
} finally {
this.apiLock.writeLock().unlock();
}
// Generate the replacement key outside the critical section above.
updateCurrentKey();
}
/**
 * Drop every master key whose expiry date has passed, notifying the
 * persistence layer via {@link #removeStoredMasterKey(DelegationKey)} for
 * each removed key except the current one.
 */
private void removeExpiredKeys() {
this.apiLock.writeLock().lock();
try {
long now = Time.now();
// Iterator.remove is required here: we mutate allKeys mid-iteration.
for (Iterator<Map.Entry<Integer, DelegationKey>> it =
allKeys.entrySet().iterator(); it.hasNext();) {
Map.Entry<Integer, DelegationKey> e = it.next();
if (e.getValue().getExpiryDate() < now) {
it.remove();
// ensure the tokens generated by this current key can be recovered
// with this current key after this current key is rolled
if (!e.getValue().equals(currentKey)) {
removeStoredMasterKey(e.getValue());
}
}
}
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Issue a new token: stamp the identifier with issue/max dates, the current
 * master-key id and a fresh sequence number, derive its password from the
 * current key, and store the token (failure to store is logged, not thrown).
 */
@Override
protected byte[] createPassword(TokenIdent identifier) {
this.apiLock.writeLock().lock();
try {
int sequenceNum;
long now = Time.now();
sequenceNum = incrementDelegationTokenSeqNum();
identifier.setIssueDate(now);
identifier.setMaxDate(now + tokenMaxLifetime);
identifier.setMasterKeyId(currentKey.getKeyId());
identifier.setSequenceNumber(sequenceNum);
LOG.info("Creating password for identifier: " + formatTokenId(identifier) +
", currentKey: " + currentKey.getKeyId());
byte[] password = createPassword(identifier.getBytes(), currentKey.getKey());
DelegationTokenInformation tokenInfo =
new DelegationTokenInformation(now + tokenRenewInterval, password,
getTrackingIdIfEnabled(identifier));
try {
METRICS.trackStoreToken(() -> storeToken(identifier, tokenInfo));
} catch (IOException ioe) {
// Store failures are non-fatal: the password is still returned.
LOG.error("Could not store token " + formatTokenId(identifier) + "!!", ioe);
}
return password;
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Find the DelegationTokenInformation for the given token id, and verify
 * that the token has not expired. Callers are expected to hold the secret
 * manager's lock.
 *
 * @param identifier identifier to look up.
 * @throws InvalidToken if the token is unknown or its renew date has passed.
 * @return DelegationTokenInformation for a valid token.
 */
protected DelegationTokenInformation checkToken(TokenIdent identifier)
    throws InvalidToken {
  DelegationTokenInformation tokenInfo = getTokenInfo(identifier);
  if (tokenInfo == null) {
    String message =
        "Token for real user: " + identifier.getRealUser() + ", can't be found in cache";
    LOG.warn("{}, Token={}", message, formatTokenId(identifier));
    throw new InvalidToken(message);
  }
  long now = Time.now();
  long renewDate = tokenInfo.getRenewDate();
  if (renewDate < now) {
    String message = "Token " + identifier.getRealUser() + " has expired, current time: "
        + Time.formatTime(now) + " expected renewal time: " + Time
        .formatTime(renewDate);
    LOG.info("{}, Token={}", message, formatTokenId(identifier));
    throw new InvalidToken(message);
  }
  return tokenInfo;
}
/**
 * Look up a token's password, validating existence and expiry via
 * {@link #checkToken}.
 */
@Override
public byte[] retrievePassword(TokenIdent identifier)
throws InvalidToken {
this.apiLock.readLock().lock();
try {
return checkToken(identifier).getPassword();
} finally {
this.apiLock.readLock().unlock();
}
}
/**
 * @return the identifier's tracking id if storeTokenTrackingId is enabled,
 * otherwise null.
 */
protected String getTrackingIdIfEnabled(TokenIdent ident) {
if (storeTokenTrackingId) {
return ident.getTrackingId();
}
return null;
}
/**
 * @return the tracking id stored for this token, or null if the token is
 * unknown (or no tracking id was recorded).
 */
public String getTokenTrackingId(TokenIdent identifier) {
this.apiLock.readLock().lock();
try {
DelegationTokenInformation info = getTokenInfo(identifier);
if (info == null) {
return null;
}
return info.getTrackingId();
} finally {
this.apiLock.readLock().unlock();
}
}
/**
 * Verifies that the given identifier and password are valid and match.
 * The comparison uses {@link MessageDigest#isEqual} (constant-time).
 *
 * @param identifier Token identifier.
 * @param password Password in the token.
 * @throws InvalidToken if the token is unknown, expired, or the password
 * does not match the stored one.
 */
public void verifyToken(TokenIdent identifier, byte[] password)
    throws InvalidToken {
  this.apiLock.readLock().lock();
  try {
    byte[] expected = retrievePassword(identifier);
    boolean matches = MessageDigest.isEqual(password, expected);
    if (!matches) {
      throw new InvalidToken("token " + formatTokenId(identifier)
          + " is invalid, password doesn't match");
    }
  } finally {
    this.apiLock.readLock().unlock();
  }
}
/**
 * Renew a delegation token.
 * Validation order: max-lifetime, presence of a renewer, renewer match,
 * known master key, password match, then token presence in the cache.
 * @param token the token to renew
 * @param renewer the full principal name of the user doing the renewal
 * @return the new expiration time
 * @throws InvalidToken if the token is invalid
 * @throws AccessControlException if the user can't renew token
 */
public long renewToken(Token<TokenIdent> token,
String renewer) throws InvalidToken, IOException {
this.apiLock.writeLock().lock();
try {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
LOG.info("Token renewal for identifier: " + formatTokenId(id) + "; total currentTokens "
+ currentTokens.size());
long now = Time.now();
if (id.getMaxDate() < now) {
throw new InvalidToken(renewer + " tried to renew an expired token " + formatTokenId(id) +
" max expiration date: " + Time.formatTime(id.getMaxDate()) + " currentTime: " +
Time.formatTime(now));
}
if ((id.getRenewer() == null) || (id.getRenewer().toString().isEmpty())) {
throw new AccessControlException(renewer + " tried to renew a token " + formatTokenId(id) +
" without a renewer");
}
if (!id.getRenewer().toString().equals(renewer)) {
throw new AccessControlException(renewer + " tries to renew a token " + formatTokenId(id) +
" with non-matching renewer " + id.getRenewer());
}
DelegationKey key = getDelegationKey(id.getMasterKeyId());
if (key == null) {
throw new InvalidToken("Unable to find master key for keyId=" + id.getMasterKeyId() +
" from cache. Failed to renew an unexpired token " + formatTokenId(id) +
" with sequenceNumber=" + id.getSequenceNumber());
}
// Recompute the password from the master key and compare constant-time.
byte[] password = createPassword(token.getIdentifier(), key.getKey());
if (!MessageDigest.isEqual(password, token.getPassword())) {
throw new AccessControlException(
renewer + " is trying to renew a token " + formatTokenId(id) + " with wrong password");
}
// New renew date is capped by the token's absolute max lifetime.
long renewTime = Math.min(id.getMaxDate(), now + tokenRenewInterval);
String trackingId = getTrackingIdIfEnabled(id);
DelegationTokenInformation info =
new DelegationTokenInformation(renewTime, password, trackingId);
if (getTokenInfo(id) == null) {
throw new InvalidToken("Renewal request for unknown token " + formatTokenId(id));
}
METRICS.trackUpdateToken(() -> updateToken(id, info));
return renewTime;
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Cancel a token by removing it from cache.
 * The owner may always cancel; otherwise the canceller's Kerberos short
 * name must match the token's renewer.
 *
 * @param token token.
 * @param canceller canceller.
 * @return Identifier of the canceled token
 * @throws InvalidToken for invalid token
 * @throws AccessControlException if the user isn't allowed to cancel
 */
public TokenIdent cancelToken(Token<TokenIdent> token,
String canceller) throws IOException {
this.apiLock.writeLock().lock();
try {
ByteArrayInputStream buf = new ByteArrayInputStream(token.getIdentifier());
DataInputStream in = new DataInputStream(buf);
TokenIdent id = createIdentifier();
id.readFields(in);
LOG.info("Token cancellation requested for identifier: " + formatTokenId(id));
if (id.getUser() == null) {
throw new InvalidToken("Token with no owner " + formatTokenId(id));
}
String owner = id.getUser().getUserName();
Text renewer = id.getRenewer();
HadoopKerberosName cancelerKrbName = new HadoopKerberosName(canceller);
String cancelerShortName = cancelerKrbName.getShortName();
if (!canceller.equals(owner) &&
(renewer == null || renewer.toString().isEmpty() ||
!cancelerShortName.equals(renewer.toString()))) {
throw new AccessControlException(canceller + " is not authorized to cancel the token " +
formatTokenId(id));
}
DelegationTokenInformation info = currentTokens.remove(id);
if (info == null) {
throw new InvalidToken("Token not found " + formatTokenId(id));
}
METRICS.trackRemoveToken(() -> {
removeTokenForOwnerStats(id);
removeStoredToken(id);
});
return id;
} finally {
this.apiLock.writeLock().unlock();
}
}
/**
 * Convert the byte[] to a secret key
 * (thin static delegate to {@link SecretManager#createSecretKey(byte[])}).
 * @param key the byte[] to create the secret key from
 * @return the secret key
 */
public static SecretKey createSecretKey(byte[] key) {
return SecretManager.createSecretKey(key);
}
/** Class to encapsulate a token's renew date and password.
 * Implements Writable; the wire format is: vlong renewDate, vint password
 * length (-1 for null) + bytes, then the tracking-id string. Changing the
 * field order would break serialization compatibility. */
@InterfaceStability.Evolving
public static class DelegationTokenInformation implements Writable {
long renewDate;
byte[] password;
String trackingId;
public DelegationTokenInformation() {
this(0, null);
}
public DelegationTokenInformation(long renewDate, byte[] password) {
this(renewDate, password, null);
}
public DelegationTokenInformation(long renewDate, byte[] password,
String trackingId) {
this.renewDate = renewDate;
this.password = password;
this.trackingId = trackingId;
}
/**
 * @return returns renew date.
 */
public long getRenewDate() {
return renewDate;
}
/**
 * @return returns password.
 */
byte[] getPassword() {
return password;
}
/**
 * @return returns tracking id.
 */
public String getTrackingId() {
return trackingId;
}
@Override
public void write(DataOutput out) throws IOException {
WritableUtils.writeVLong(out, renewDate);
// -1 length marks a null password.
if (password == null) {
WritableUtils.writeVInt(out, -1);
} else {
WritableUtils.writeVInt(out, password.length);
out.write(password);
}
WritableUtils.writeString(out, trackingId);
}
@Override
public void readFields(DataInput in) throws IOException {
renewDate = WritableUtils.readVLong(in);
int len = WritableUtils.readVInt(in);
// len == -1 means no password was written; leave the field as-is.
if (len > -1) {
password = new byte[len];
in.readFully(password);
}
trackingId = WritableUtils.readString(in);
}
}
/** Remove expired delegation tokens from cache, then log/persist the
 * expirations outside the lock. */
private void removeExpiredToken() throws IOException {
long now = Time.now();
Set<TokenIdent> expiredTokens = new HashSet<>();
this.apiLock.writeLock().lock();
try {
Iterator<Map.Entry<TokenIdent, DelegationTokenInformation>> i =
getCandidateTokensForCleanup().entrySet().iterator();
while (i.hasNext()) {
Map.Entry<TokenIdent, DelegationTokenInformation> entry = i.next();
long renewDate = entry.getValue().getRenewDate();
if (renewDate < now) {
expiredTokens.add(entry.getKey());
removeTokenForOwnerStats(entry.getKey());
// Iterator.remove: safe in-place removal during iteration.
i.remove();
}
}
} finally {
this.apiLock.writeLock().unlock();
}
// don't hold lock on 'this' to avoid edit log updates blocking token ops
logExpireTokens(expiredTokens);
}
/**
 * @return the map of tokens the remover thread should scan for expiry.
 * Subclasses may narrow this to a subset of currentTokens.
 */
protected Map<TokenIdent, DelegationTokenInformation> getCandidateTokensForCleanup() {
return this.currentTokens;
}
/**
 * Log each expired token and remove it from persistent storage.
 */
protected void logExpireTokens(
Collection<TokenIdent> expiredTokens) throws IOException {
for (TokenIdent ident : expiredTokens) {
logExpireToken(ident);
LOG.info("Removing expired token " + formatTokenId(ident));
removeExpiredStoredToken(ident);
}
}
// Overridable hook; defaults to the generic stored-token removal.
protected void removeExpiredStoredToken(TokenIdent ident) throws IOException {
removeStoredToken(ident);
}
/**
 * Stop the expired-token remover daemon and wait for it to exit.
 * The interrupt is sent while holding {@code noInterruptsLock} so the
 * remover is never interrupted inside a section it marked uninterruptible.
 *
 * @throws RuntimeException if this thread is interrupted while joining the
 * remover thread (the interrupt status is restored first).
 */
public void stopThreads() {
  if (LOG.isDebugEnabled()) {
    LOG.debug("Stopping expired delegation token remover thread");
  }
  running = false;
  if (tokenRemoverThread != null) {
    synchronized (noInterruptsLock) {
      tokenRemoverThread.interrupt();
    }
    try {
      tokenRemoverThread.join();
    } catch (InterruptedException e) {
      // Fix: restore the caller's interrupt status instead of silently
      // swallowing it before propagating the failure.
      Thread.currentThread().interrupt();
      throw new RuntimeException(
          "Unable to join on token removal thread", e);
    }
  }
}
/**
 * is secretMgr running
 * (reads the volatile {@code running} flag; no lock taken)
 * @return true if secret mgr is running
 */
public boolean isRunning() {
return running;
}
/**
 * Daemon loop that periodically rolls the master key and evicts expired
 * tokens while {@code running} is true. Any unexpected Throwable kills the
 * JVM (exit -1), since a dead remover would silently stop key rotation.
 */
private class ExpiredTokenRemover extends Thread {
private long lastMasterKeyUpdate;
private long lastTokenCacheCleanup;
@Override
public void run() {
LOG.info("Starting expired delegation token remover thread, "
+ "tokenRemoverScanInterval=" + tokenRemoverScanInterval
/ (60 * 1000) + " min(s)");
try {
while (running) {
long now = Time.now();
if (lastMasterKeyUpdate + keyUpdateInterval < now) {
try {
rollMasterKey();
lastMasterKeyUpdate = now;
} catch (IOException e) {
// Non-fatal: retried on the next iteration.
LOG.error("Master key updating failed: ", e);
}
}
if (lastTokenCacheCleanup + tokenRemoverScanInterval < now) {
removeExpiredToken();
lastTokenCacheCleanup = now;
}
try {
Thread.sleep(Math.min(5000, keyUpdateInterval)); // 5 seconds
} catch (InterruptedException ie) {
// Expected during stopThreads(); loop re-checks 'running'.
LOG.error("ExpiredTokenRemover received " + ie);
}
}
} catch (Throwable t) {
LOG.error("ExpiredTokenRemover thread received unexpected exception", t);
Runtime.getRuntime().exit(-1);
}
}
}
/**
 * Decode the token identifier. The subclass can customize the way to decode
 * the token identifier.
 *
 * @param token the token where to extract the identifier
 * @return the delegation token identifier
 * @throws IOException raised on errors performing I/O.
 */
public TokenIdent decodeTokenIdentifier(Token<TokenIdent> token) throws IOException {
return token.decodeIdentifier();
}
/**
 * Return top token real owners list as well as the tokens count,
 * sorted descending by count.
 *
 * @param n top number of users; values less than 1 yield an empty list
 * @return map of owners to counts
 */
public List<NameValuePair> getTopTokenRealOwners(int n) {
  n = Math.min(n, tokenOwnerStats.size());
  // Fix: guard n <= 0, not just n == 0 — a negative n previously fell
  // through to "new TopN(n)" with a negative capacity.
  if (n <= 0) {
    return new ArrayList<>();
  }
  TopN topN = new TopN(n);
  for (Map.Entry<String, Long> entry : tokenOwnerStats.entrySet()) {
    topN.offer(new NameValuePair(
        entry.getKey(), entry.getValue()));
  }
  // TopN keeps the n largest; drain it (ascending) then reverse.
  List<NameValuePair> list = new ArrayList<>();
  while (!topN.isEmpty()) {
    list.add(topN.poll());
  }
  Collections.reverse(list);
  return list;
}
/**
 * Return the real owner for a token. If this is a token from a proxy user,
 * the real/effective user will be returned.
 *
 * @param id token identifier to inspect
 * @return real owner
 */
private String getTokenRealOwner(TokenIdent id) {
  Text realUserName = id.getRealUser();
  if (realUserName == null || realUserName.toString().isEmpty()) {
    // if there is no real user -> this is a non proxy user
    // the user itself is the real owner
    return id.getUser().getUserName();
  }
  return realUserName.toString();
}
/**
 * Add token stats to the owner to token count mapping.
 *
 * @param id token id.
 */
protected void addTokenForOwnerStats(TokenIdent id) {
  // Fix: merge() is a single atomic read-modify-write on ConcurrentHashMap;
  // the previous getOrDefault + put pair could lose increments under
  // concurrent callers.
  tokenOwnerStats.merge(getTokenRealOwner(id), 1L, Long::sum);
}
/**
 * Remove token stats to the owner to token count mapping: decrement the
 * owner's count, dropping the entry entirely when it reaches zero.
 *
 * @param id token id.
 */
private void removeTokenForOwnerStats(TokenIdent id) {
  String realOwner = getTokenRealOwner(id);
  // Fix: computeIfPresent is atomic on ConcurrentHashMap; the previous
  // containsKey/get/put sequence was a check-then-act race. Returning null
  // from the remapping function removes the entry (count <= 1, "unlikely to
  // be less than 1 but in case").
  tokenOwnerStats.computeIfPresent(realOwner,
      (owner, count) -> count <= 1 ? null : count - 1);
}
/**
 * This method syncs token information from currentTokens to tokenOwnerStats.
 * It is used when the currentTokens is initialized or refreshed. This is
 * called from a single thread thus no synchronization is needed.
 */
protected void syncTokenOwnerStats() {
tokenOwnerStats.clear();
for (TokenIdent id : currentTokens.keySet()) {
addTokenForOwnerStats(id);
}
}
/** @return the shared (static) metrics instance for this JVM. */
protected DelegationTokenSecretManagerMetrics getMetrics() {
return METRICS;
}
/**
 * DelegationTokenSecretManagerMetrics tracks token management operations
 * and publishes them through the metrics interfaces.
 * Each store/update/remove is timed twice: into the IOStatistics store and
 * into the corresponding MutableRate; failures bump a shared counter.
 */
@Metrics(about="Delegation token secret manager metrics", context="token")
static class DelegationTokenSecretManagerMetrics implements DurationTrackerFactory {
private static final Logger LOG = LoggerFactory.getLogger(
DelegationTokenSecretManagerMetrics.class);
// Statistic keys used in the IOStatistics store.
final static String STORE_TOKEN_STAT = "storeToken";
final static String UPDATE_TOKEN_STAT = "updateToken";
final static String REMOVE_TOKEN_STAT = "removeToken";
final static String TOKEN_FAILURE_STAT = "tokenFailure";
private final MetricsRegistry registry;
private final IOStatisticsStore ioStatistics;
@Metric("Rate of storage of delegation tokens and latency (milliseconds)")
private MutableRate storeToken;
@Metric("Rate of update of delegation tokens and latency (milliseconds)")
private MutableRate updateToken;
@Metric("Rate of removal of delegation tokens and latency (milliseconds)")
private MutableRate removeToken;
@Metric("Counter of delegation tokens operation failures")
private MutableCounterLong tokenFailure;
// Registers the instance with the default metrics system (annotation-driven).
static DelegationTokenSecretManagerMetrics create() {
return DefaultMetricsSystem.instance().register(new DelegationTokenSecretManagerMetrics());
}
DelegationTokenSecretManagerMetrics() {
ioStatistics = IOStatisticsBinding.iostatisticsStore()
.withDurationTracking(STORE_TOKEN_STAT, UPDATE_TOKEN_STAT, REMOVE_TOKEN_STAT)
.withCounters(TOKEN_FAILURE_STAT)
.build();
registry = new MetricsRegistry("DelegationTokenSecretManagerMetrics");
LOG.debug("Initialized {}", registry);
}
public void trackStoreToken(InvocationRaisingIOE invocation) throws IOException {
trackInvocation(invocation, STORE_TOKEN_STAT, storeToken);
}
public void trackUpdateToken(InvocationRaisingIOE invocation) throws IOException {
trackInvocation(invocation, UPDATE_TOKEN_STAT, updateToken);
}
public void trackRemoveToken(InvocationRaisingIOE invocation) throws IOException {
trackInvocation(invocation, REMOVE_TOKEN_STAT, removeToken);
}
/**
 * Run the invocation, recording its duration under the given statistic and
 * rate metric; on any exception the failure counter is bumped and the
 * exception is rethrown.
 */
public void trackInvocation(InvocationRaisingIOE invocation, String statistic,
MutableRate metric) throws IOException {
try {
long start = Time.monotonicNow();
IOStatisticsBinding.trackDurationOfInvocation(this, statistic, invocation);
metric.add(Time.monotonicNow() - start);
} catch (Exception ex) {
tokenFailure.incr();
throw ex;
}
}
@Override
public DurationTracker trackDuration(String key, long count) {
return ioStatistics.trackDuration(key, count);
}
// Package-visible accessors below exist for tests/subclasses.
protected MutableRate getStoreToken() {
return storeToken;
}
protected MutableRate getUpdateToken() {
return updateToken;
}
protected MutableRate getRemoveToken() {
return removeToken;
}
protected MutableCounterLong getTokenFailure() {
return tokenFailure;
}
protected IOStatisticsStore getIoStatistics() {
return ioStatistics;
}
}
}
|
apache/hadoop | 37,212 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/service/ApplicationClientProtocolPBServiceImpl.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.impl.pb.service;
import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.ApplicationClientProtocolPB;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.FailApplicationAttemptResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewReservationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToAttributesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationListResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationPriorityResponse;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SignalContainerResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllResourceProfilesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAllResourceTypeInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetResourceProfileResponse;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FailApplicationAttemptRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.FailApplicationAttemptResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAttributesToNodesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAttributesToNodesResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterMetricsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodeAttributesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodeAttributesResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodeLabelsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodeLabelsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetClusterNodesResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetLabelsToNodesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetLabelsToNodesResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewReservationRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNewReservationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNodesToAttributesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNodesToAttributesResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNodesToLabelsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetNodesToLabelsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueInfoResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetQueueUserAclsInfoResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.KillApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.MoveApplicationAcrossQueuesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.MoveApplicationAcrossQueuesResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationDeleteRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationDeleteResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationListRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationListResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationSubmissionRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationSubmissionResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationUpdateRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.ReservationUpdateResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SignalContainerRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SignalContainerResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.UpdateApplicationPriorityRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.UpdateApplicationPriorityResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.UpdateApplicationTimeoutsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.UpdateApplicationTimeoutsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllResourceProfilesRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllResourceProfilesResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllResourceTypeInfoRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetAllResourceTypeInfoResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetResourceProfileRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetResourceProfileResponsePBImpl;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.proto.YarnServiceProtos;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.FailApplicationAttemptResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterMetricsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeLabelsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetLabelsToNodesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNewReservationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToLabelsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.MoveApplicationAcrossQueuesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationDeleteResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationSubmissionResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationUpdateResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.ReservationListResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SignalContainerResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationPriorityResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.UpdateApplicationTimeoutsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceTypeInfoResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetNodesToAttributesResponseProto;
import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
@Private
public class ApplicationClientProtocolPBServiceImpl implements ApplicationClientProtocolPB {
private ApplicationClientProtocol real;
public ApplicationClientProtocolPBServiceImpl(ApplicationClientProtocol impl) {
this.real = impl;
}
@Override
public FailApplicationAttemptResponseProto failApplicationAttempt(RpcController arg0,
FailApplicationAttemptRequestProto proto) throws ServiceException {
FailApplicationAttemptRequestPBImpl request = new FailApplicationAttemptRequestPBImpl(proto);
try {
FailApplicationAttemptResponse response = real.failApplicationAttempt(request);
return ((FailApplicationAttemptResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public KillApplicationResponseProto forceKillApplication(RpcController arg0,
KillApplicationRequestProto proto) throws ServiceException {
KillApplicationRequestPBImpl request = new KillApplicationRequestPBImpl(proto);
try {
KillApplicationResponse response = real.forceKillApplication(request);
return ((KillApplicationResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetApplicationReportResponseProto getApplicationReport(
RpcController arg0, GetApplicationReportRequestProto proto)
throws ServiceException {
GetApplicationReportRequestPBImpl request = new GetApplicationReportRequestPBImpl(proto);
try {
GetApplicationReportResponse response = real.getApplicationReport(request);
return ((GetApplicationReportResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetClusterMetricsResponseProto getClusterMetrics(RpcController arg0,
GetClusterMetricsRequestProto proto) throws ServiceException {
GetClusterMetricsRequestPBImpl request = new GetClusterMetricsRequestPBImpl(proto);
try {
GetClusterMetricsResponse response = real.getClusterMetrics(request);
return ((GetClusterMetricsResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetNewApplicationResponseProto getNewApplication(
RpcController arg0, GetNewApplicationRequestProto proto)
throws ServiceException {
GetNewApplicationRequestPBImpl request = new GetNewApplicationRequestPBImpl(proto);
try {
GetNewApplicationResponse response = real.getNewApplication(request);
return ((GetNewApplicationResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SubmitApplicationResponseProto submitApplication(RpcController arg0,
SubmitApplicationRequestProto proto) throws ServiceException {
SubmitApplicationRequestPBImpl request = new SubmitApplicationRequestPBImpl(proto);
try {
SubmitApplicationResponse response = real.submitApplication(request);
return ((SubmitApplicationResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetApplicationsResponseProto getApplications(
RpcController controller, GetApplicationsRequestProto proto)
throws ServiceException {
GetApplicationsRequestPBImpl request =
new GetApplicationsRequestPBImpl(proto);
try {
GetApplicationsResponse response = real.getApplications(request);
return ((GetApplicationsResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetClusterNodesResponseProto getClusterNodes(RpcController controller,
GetClusterNodesRequestProto proto) throws ServiceException {
GetClusterNodesRequestPBImpl request =
new GetClusterNodesRequestPBImpl(proto);
try {
GetClusterNodesResponse response = real.getClusterNodes(request);
return ((GetClusterNodesResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetQueueInfoResponseProto getQueueInfo(RpcController controller,
GetQueueInfoRequestProto proto) throws ServiceException {
GetQueueInfoRequestPBImpl request =
new GetQueueInfoRequestPBImpl(proto);
try {
GetQueueInfoResponse response = real.getQueueInfo(request);
return ((GetQueueInfoResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetQueueUserAclsInfoResponseProto getQueueUserAcls(
RpcController controller, GetQueueUserAclsInfoRequestProto proto)
throws ServiceException {
GetQueueUserAclsInfoRequestPBImpl request =
new GetQueueUserAclsInfoRequestPBImpl(proto);
try {
GetQueueUserAclsInfoResponse response = real.getQueueUserAcls(request);
return ((GetQueueUserAclsInfoResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetDelegationTokenResponseProto getDelegationToken(
RpcController controller, GetDelegationTokenRequestProto proto)
throws ServiceException {
GetDelegationTokenRequestPBImpl request =
new GetDelegationTokenRequestPBImpl(proto);
try {
GetDelegationTokenResponse response = real.getDelegationToken(request);
return ((GetDelegationTokenResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public RenewDelegationTokenResponseProto renewDelegationToken(
RpcController controller, RenewDelegationTokenRequestProto proto)
throws ServiceException {
RenewDelegationTokenRequestPBImpl request =
new RenewDelegationTokenRequestPBImpl(proto);
try {
RenewDelegationTokenResponse response = real.renewDelegationToken(request);
return ((RenewDelegationTokenResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public CancelDelegationTokenResponseProto cancelDelegationToken(
RpcController controller, CancelDelegationTokenRequestProto proto)
throws ServiceException {
CancelDelegationTokenRequestPBImpl request =
new CancelDelegationTokenRequestPBImpl(proto);
try {
CancelDelegationTokenResponse response = real.cancelDelegationToken(request);
return ((CancelDelegationTokenResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public MoveApplicationAcrossQueuesResponseProto moveApplicationAcrossQueues(
RpcController controller, MoveApplicationAcrossQueuesRequestProto proto)
throws ServiceException {
MoveApplicationAcrossQueuesRequestPBImpl request =
new MoveApplicationAcrossQueuesRequestPBImpl(proto);
try {
MoveApplicationAcrossQueuesResponse response = real.moveApplicationAcrossQueues(request);
return ((MoveApplicationAcrossQueuesResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetApplicationAttemptReportResponseProto getApplicationAttemptReport(
RpcController controller, GetApplicationAttemptReportRequestProto proto)
throws ServiceException {
GetApplicationAttemptReportRequestPBImpl request =
new GetApplicationAttemptReportRequestPBImpl(proto);
try {
GetApplicationAttemptReportResponse response =
real.getApplicationAttemptReport(request);
return ((GetApplicationAttemptReportResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetApplicationAttemptsResponseProto getApplicationAttempts(
RpcController controller, GetApplicationAttemptsRequestProto proto)
throws ServiceException {
GetApplicationAttemptsRequestPBImpl request =
new GetApplicationAttemptsRequestPBImpl(proto);
try {
GetApplicationAttemptsResponse response =
real.getApplicationAttempts(request);
return ((GetApplicationAttemptsResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetContainerReportResponseProto getContainerReport(
RpcController controller, GetContainerReportRequestProto proto)
throws ServiceException {
GetContainerReportRequestPBImpl request =
new GetContainerReportRequestPBImpl(proto);
try {
GetContainerReportResponse response = real.getContainerReport(request);
return ((GetContainerReportResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetContainersResponseProto getContainers(RpcController controller,
GetContainersRequestProto proto) throws ServiceException {
GetContainersRequestPBImpl request = new GetContainersRequestPBImpl(proto);
try {
GetContainersResponse response = real.getContainers(request);
return ((GetContainersResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetNewReservationResponseProto getNewReservation(
RpcController arg0, GetNewReservationRequestProto proto) throws
ServiceException {
GetNewReservationRequestPBImpl request =
new GetNewReservationRequestPBImpl(proto);
try {
GetNewReservationResponse response = real.getNewReservation(request);
return ((GetNewReservationResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ReservationSubmissionResponseProto submitReservation(RpcController controller,
ReservationSubmissionRequestProto requestProto) throws ServiceException {
ReservationSubmissionRequestPBImpl request =
new ReservationSubmissionRequestPBImpl(requestProto);
try {
ReservationSubmissionResponse response = real.submitReservation(request);
return ((ReservationSubmissionResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ReservationUpdateResponseProto updateReservation(RpcController controller,
ReservationUpdateRequestProto requestProto) throws ServiceException {
ReservationUpdateRequestPBImpl request =
new ReservationUpdateRequestPBImpl(requestProto);
try {
ReservationUpdateResponse response = real.updateReservation(request);
return ((ReservationUpdateResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ReservationDeleteResponseProto deleteReservation(RpcController controller,
ReservationDeleteRequestProto requestProto) throws ServiceException {
ReservationDeleteRequestPBImpl request =
new ReservationDeleteRequestPBImpl(requestProto);
try {
ReservationDeleteResponse response = real.deleteReservation(request);
return ((ReservationDeleteResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public ReservationListResponseProto listReservations(RpcController controller,
ReservationListRequestProto requestProto) throws ServiceException {
ReservationListRequestPBImpl request =
new ReservationListRequestPBImpl(requestProto);
try {
ReservationListResponse response = real.listReservations(request);
return ((ReservationListResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetNodesToLabelsResponseProto getNodeToLabels(
RpcController controller, GetNodesToLabelsRequestProto proto)
throws ServiceException {
GetNodesToLabelsRequestPBImpl request =
new GetNodesToLabelsRequestPBImpl(proto);
try {
GetNodesToLabelsResponse response = real.getNodeToLabels(request);
return ((GetNodesToLabelsResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetLabelsToNodesResponseProto getLabelsToNodes(
RpcController controller, GetLabelsToNodesRequestProto proto)
throws ServiceException {
GetLabelsToNodesRequestPBImpl request =
new GetLabelsToNodesRequestPBImpl(proto);
try {
GetLabelsToNodesResponse response = real.getLabelsToNodes(request);
return ((GetLabelsToNodesResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetClusterNodeLabelsResponseProto getClusterNodeLabels(
RpcController controller, GetClusterNodeLabelsRequestProto proto)
throws ServiceException {
GetClusterNodeLabelsRequestPBImpl request =
new GetClusterNodeLabelsRequestPBImpl(proto);
try {
GetClusterNodeLabelsResponse response =
real.getClusterNodeLabels(request);
return ((GetClusterNodeLabelsResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public UpdateApplicationPriorityResponseProto updateApplicationPriority(
RpcController controller, UpdateApplicationPriorityRequestProto proto)
throws ServiceException {
UpdateApplicationPriorityRequestPBImpl request =
new UpdateApplicationPriorityRequestPBImpl(proto);
try {
UpdateApplicationPriorityResponse response =
real.updateApplicationPriority(request);
return ((UpdateApplicationPriorityResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public SignalContainerResponseProto signalToContainer(
RpcController controller,
YarnServiceProtos.SignalContainerRequestProto proto) throws ServiceException {
SignalContainerRequestPBImpl request = new SignalContainerRequestPBImpl(proto);
try {
SignalContainerResponse response = real.signalToContainer(request);
return ((SignalContainerResponsePBImpl)response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public UpdateApplicationTimeoutsResponseProto updateApplicationTimeouts(
RpcController controller, UpdateApplicationTimeoutsRequestProto proto)
throws ServiceException {
UpdateApplicationTimeoutsRequestPBImpl request =
new UpdateApplicationTimeoutsRequestPBImpl(proto);
try {
UpdateApplicationTimeoutsResponse response =
real.updateApplicationTimeouts(request);
return ((UpdateApplicationTimeoutsResponsePBImpl) response).getProto();
} catch (YarnException e) {
throw new ServiceException(e);
} catch (IOException e) {
throw new ServiceException(e);
}
}
@Override
public GetAllResourceProfilesResponseProto getResourceProfiles(
RpcController controller, GetAllResourceProfilesRequestProto proto)
throws ServiceException {
GetAllResourceProfilesRequestPBImpl req =
new GetAllResourceProfilesRequestPBImpl(proto);
try {
GetAllResourceProfilesResponse resp = real.getResourceProfiles(req);
return ((GetAllResourceProfilesResponsePBImpl) resp).getProto();
} catch (YarnException ye) {
throw new ServiceException(ye);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
public GetResourceProfileResponseProto getResourceProfile(
RpcController controller, GetResourceProfileRequestProto proto)
throws ServiceException {
GetResourceProfileRequestPBImpl req =
new GetResourceProfileRequestPBImpl(proto);
try {
GetResourceProfileResponse resp = real.getResourceProfile(req);
return ((GetResourceProfileResponsePBImpl) resp).getProto();
} catch (YarnException ye) {
throw new ServiceException(ye);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
public GetAllResourceTypeInfoResponseProto getResourceTypeInfo(
RpcController controller, GetAllResourceTypeInfoRequestProto proto)
throws ServiceException {
GetAllResourceTypeInfoRequestPBImpl req = new GetAllResourceTypeInfoRequestPBImpl(
proto);
try {
GetAllResourceTypeInfoResponse resp = real.getResourceTypeInfo(req);
return ((GetAllResourceTypeInfoResponsePBImpl) resp).getProto();
} catch (YarnException ye) {
throw new ServiceException(ye);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
public GetClusterNodeAttributesResponseProto getClusterNodeAttributes(
RpcController controller,
YarnServiceProtos.GetClusterNodeAttributesRequestProto proto)
throws ServiceException {
GetClusterNodeAttributesRequest req =
new GetClusterNodeAttributesRequestPBImpl(proto);
try {
GetClusterNodeAttributesResponse resp =
real.getClusterNodeAttributes(req);
return ((GetClusterNodeAttributesResponsePBImpl) resp).getProto();
} catch (YarnException ye) {
throw new ServiceException(ye);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
public GetAttributesToNodesResponseProto getAttributesToNodes(
RpcController controller,
YarnServiceProtos.GetAttributesToNodesRequestProto proto)
throws ServiceException {
GetAttributesToNodesRequestPBImpl req =
new GetAttributesToNodesRequestPBImpl(proto);
try {
GetAttributesToNodesResponse resp = real.getAttributesToNodes(req);
return ((GetAttributesToNodesResponsePBImpl) resp).getProto();
} catch (YarnException ye) {
throw new ServiceException(ye);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
@Override
public GetNodesToAttributesResponseProto getNodesToAttributes(
RpcController controller,
YarnServiceProtos.GetNodesToAttributesRequestProto proto)
throws ServiceException {
GetNodesToAttributesRequestPBImpl req =
new GetNodesToAttributesRequestPBImpl(proto);
try {
GetNodesToAttributesResponse resp = real.getNodesToAttributes(req);
return ((GetNodesToAttributesResponsePBImpl) resp).getProto();
} catch (YarnException ye) {
throw new ServiceException(ye);
} catch (IOException ie) {
throw new ServiceException(ie);
}
}
}
|
apache/hive | 36,803 | standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/NotificationEvent.java | /**
* Autogenerated by Thrift Compiler (0.16.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class NotificationEvent implements org.apache.thrift.TBase<NotificationEvent, NotificationEvent._Fields>, java.io.Serializable, Cloneable, Comparable<NotificationEvent> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("NotificationEvent");
private static final org.apache.thrift.protocol.TField EVENT_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("eventId", org.apache.thrift.protocol.TType.I64, (short)1);
private static final org.apache.thrift.protocol.TField EVENT_TIME_FIELD_DESC = new org.apache.thrift.protocol.TField("eventTime", org.apache.thrift.protocol.TType.I32, (short)2);
private static final org.apache.thrift.protocol.TField EVENT_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("eventType", org.apache.thrift.protocol.TType.STRING, (short)3);
private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbName", org.apache.thrift.protocol.TType.STRING, (short)4);
private static final org.apache.thrift.protocol.TField TABLE_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tableName", org.apache.thrift.protocol.TType.STRING, (short)5);
private static final org.apache.thrift.protocol.TField MESSAGE_FIELD_DESC = new org.apache.thrift.protocol.TField("message", org.apache.thrift.protocol.TType.STRING, (short)6);
private static final org.apache.thrift.protocol.TField MESSAGE_FORMAT_FIELD_DESC = new org.apache.thrift.protocol.TField("messageFormat", org.apache.thrift.protocol.TType.STRING, (short)7);
private static final org.apache.thrift.protocol.TField CAT_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("catName", org.apache.thrift.protocol.TType.STRING, (short)8);
private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new NotificationEventStandardSchemeFactory();
private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new NotificationEventTupleSchemeFactory();
private long eventId; // required
private int eventTime; // required
private @org.apache.thrift.annotation.Nullable java.lang.String eventType; // required
private @org.apache.thrift.annotation.Nullable java.lang.String dbName; // optional
private @org.apache.thrift.annotation.Nullable java.lang.String tableName; // optional
private @org.apache.thrift.annotation.Nullable java.lang.String message; // required
private @org.apache.thrift.annotation.Nullable java.lang.String messageFormat; // optional
private @org.apache.thrift.annotation.Nullable java.lang.String catName; // optional
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
EVENT_ID((short)1, "eventId"),
EVENT_TIME((short)2, "eventTime"),
EVENT_TYPE((short)3, "eventType"),
DB_NAME((short)4, "dbName"),
TABLE_NAME((short)5, "tableName"),
MESSAGE((short)6, "message"),
MESSAGE_FORMAT((short)7, "messageFormat"),
CAT_NAME((short)8, "catName");
private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
static {
for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // EVENT_ID
return EVENT_ID;
case 2: // EVENT_TIME
return EVENT_TIME;
case 3: // EVENT_TYPE
return EVENT_TYPE;
case 4: // DB_NAME
return DB_NAME;
case 5: // TABLE_NAME
return TABLE_NAME;
case 6: // MESSAGE
return MESSAGE;
case 7: // MESSAGE_FORMAT
return MESSAGE_FORMAT;
case 8: // CAT_NAME
return CAT_NAME;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
@org.apache.thrift.annotation.Nullable
public static _Fields findByName(java.lang.String name) {
return byName.get(name);
}
private final short _thriftId;
private final java.lang.String _fieldName;
_Fields(short thriftId, java.lang.String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public java.lang.String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __EVENTID_ISSET_ID = 0;
private static final int __EVENTTIME_ISSET_ID = 1;
private byte __isset_bitfield = 0;
private static final _Fields optionals[] = {_Fields.DB_NAME,_Fields.TABLE_NAME,_Fields.MESSAGE_FORMAT,_Fields.CAT_NAME};
public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.EVENT_ID, new org.apache.thrift.meta_data.FieldMetaData("eventId", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.EVENT_TIME, new org.apache.thrift.meta_data.FieldMetaData("eventTime", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
tmpMap.put(_Fields.EVENT_TYPE, new org.apache.thrift.meta_data.FieldMetaData("eventType", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("dbName", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.TABLE_NAME, new org.apache.thrift.meta_data.FieldMetaData("tableName", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.MESSAGE, new org.apache.thrift.meta_data.FieldMetaData("message", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.MESSAGE_FORMAT, new org.apache.thrift.meta_data.FieldMetaData("messageFormat", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
tmpMap.put(_Fields.CAT_NAME, new org.apache.thrift.meta_data.FieldMetaData("catName", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(NotificationEvent.class, metaDataMap);
}
public NotificationEvent() {
}
public NotificationEvent(
long eventId,
int eventTime,
java.lang.String eventType,
java.lang.String message)
{
this();
this.eventId = eventId;
setEventIdIsSet(true);
this.eventTime = eventTime;
setEventTimeIsSet(true);
this.eventType = eventType;
this.message = message;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public NotificationEvent(NotificationEvent other) {
__isset_bitfield = other.__isset_bitfield;
this.eventId = other.eventId;
this.eventTime = other.eventTime;
if (other.isSetEventType()) {
this.eventType = other.eventType;
}
if (other.isSetDbName()) {
this.dbName = other.dbName;
}
if (other.isSetTableName()) {
this.tableName = other.tableName;
}
if (other.isSetMessage()) {
this.message = other.message;
}
if (other.isSetMessageFormat()) {
this.messageFormat = other.messageFormat;
}
if (other.isSetCatName()) {
this.catName = other.catName;
}
}
public NotificationEvent deepCopy() {
return new NotificationEvent(this);
}
@Override
public void clear() {
setEventIdIsSet(false);
this.eventId = 0;
setEventTimeIsSet(false);
this.eventTime = 0;
this.eventType = null;
this.dbName = null;
this.tableName = null;
this.message = null;
this.messageFormat = null;
this.catName = null;
}
public long getEventId() {
return this.eventId;
}
public void setEventId(long eventId) {
this.eventId = eventId;
setEventIdIsSet(true);
}
public void unsetEventId() {
__isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __EVENTID_ISSET_ID);
}
/** Returns true if field eventId is set (has been assigned a value) and false otherwise */
public boolean isSetEventId() {
return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __EVENTID_ISSET_ID);
}
public void setEventIdIsSet(boolean value) {
__isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __EVENTID_ISSET_ID, value);
}
public int getEventTime() {
return this.eventTime;
}
public void setEventTime(int eventTime) {
this.eventTime = eventTime;
setEventTimeIsSet(true);
}
public void unsetEventTime() {
__isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __EVENTTIME_ISSET_ID);
}
/** Returns true if field eventTime is set (has been assigned a value) and false otherwise */
public boolean isSetEventTime() {
return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __EVENTTIME_ISSET_ID);
}
public void setEventTimeIsSet(boolean value) {
__isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __EVENTTIME_ISSET_ID, value);
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getEventType() {
return this.eventType;
}
public void setEventType(@org.apache.thrift.annotation.Nullable java.lang.String eventType) {
this.eventType = eventType;
}
public void unsetEventType() {
this.eventType = null;
}
/** Returns true if field eventType is set (has been assigned a value) and false otherwise */
public boolean isSetEventType() {
return this.eventType != null;
}
public void setEventTypeIsSet(boolean value) {
if (!value) {
this.eventType = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getDbName() {
return this.dbName;
}
public void setDbName(@org.apache.thrift.annotation.Nullable java.lang.String dbName) {
this.dbName = dbName;
}
public void unsetDbName() {
this.dbName = null;
}
/** Returns true if field dbName is set (has been assigned a value) and false otherwise */
public boolean isSetDbName() {
return this.dbName != null;
}
public void setDbNameIsSet(boolean value) {
if (!value) {
this.dbName = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getTableName() {
return this.tableName;
}
public void setTableName(@org.apache.thrift.annotation.Nullable java.lang.String tableName) {
this.tableName = tableName;
}
public void unsetTableName() {
this.tableName = null;
}
/** Returns true if field tableName is set (has been assigned a value) and false otherwise */
public boolean isSetTableName() {
return this.tableName != null;
}
public void setTableNameIsSet(boolean value) {
if (!value) {
this.tableName = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getMessage() {
return this.message;
}
public void setMessage(@org.apache.thrift.annotation.Nullable java.lang.String message) {
this.message = message;
}
public void unsetMessage() {
this.message = null;
}
/** Returns true if field message is set (has been assigned a value) and false otherwise */
public boolean isSetMessage() {
return this.message != null;
}
public void setMessageIsSet(boolean value) {
if (!value) {
this.message = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getMessageFormat() {
return this.messageFormat;
}
public void setMessageFormat(@org.apache.thrift.annotation.Nullable java.lang.String messageFormat) {
this.messageFormat = messageFormat;
}
public void unsetMessageFormat() {
this.messageFormat = null;
}
/** Returns true if field messageFormat is set (has been assigned a value) and false otherwise */
public boolean isSetMessageFormat() {
return this.messageFormat != null;
}
public void setMessageFormatIsSet(boolean value) {
if (!value) {
this.messageFormat = null;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.String getCatName() {
return this.catName;
}
public void setCatName(@org.apache.thrift.annotation.Nullable java.lang.String catName) {
this.catName = catName;
}
public void unsetCatName() {
this.catName = null;
}
/** Returns true if field catName is set (has been assigned a value) and false otherwise */
public boolean isSetCatName() {
return this.catName != null;
}
public void setCatNameIsSet(boolean value) {
if (!value) {
this.catName = null;
}
}
public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
switch (field) {
case EVENT_ID:
if (value == null) {
unsetEventId();
} else {
setEventId((java.lang.Long)value);
}
break;
case EVENT_TIME:
if (value == null) {
unsetEventTime();
} else {
setEventTime((java.lang.Integer)value);
}
break;
case EVENT_TYPE:
if (value == null) {
unsetEventType();
} else {
setEventType((java.lang.String)value);
}
break;
case DB_NAME:
if (value == null) {
unsetDbName();
} else {
setDbName((java.lang.String)value);
}
break;
case TABLE_NAME:
if (value == null) {
unsetTableName();
} else {
setTableName((java.lang.String)value);
}
break;
case MESSAGE:
if (value == null) {
unsetMessage();
} else {
setMessage((java.lang.String)value);
}
break;
case MESSAGE_FORMAT:
if (value == null) {
unsetMessageFormat();
} else {
setMessageFormat((java.lang.String)value);
}
break;
case CAT_NAME:
if (value == null) {
unsetCatName();
} else {
setCatName((java.lang.String)value);
}
break;
}
}
@org.apache.thrift.annotation.Nullable
public java.lang.Object getFieldValue(_Fields field) {
switch (field) {
case EVENT_ID:
return getEventId();
case EVENT_TIME:
return getEventTime();
case EVENT_TYPE:
return getEventType();
case DB_NAME:
return getDbName();
case TABLE_NAME:
return getTableName();
case MESSAGE:
return getMessage();
case MESSAGE_FORMAT:
return getMessageFormat();
case CAT_NAME:
return getCatName();
}
throw new java.lang.IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new java.lang.IllegalArgumentException();
}
switch (field) {
case EVENT_ID:
return isSetEventId();
case EVENT_TIME:
return isSetEventTime();
case EVENT_TYPE:
return isSetEventType();
case DB_NAME:
return isSetDbName();
case TABLE_NAME:
return isSetTableName();
case MESSAGE:
return isSetMessage();
case MESSAGE_FORMAT:
return isSetMessageFormat();
case CAT_NAME:
return isSetCatName();
}
throw new java.lang.IllegalStateException();
}
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof NotificationEvent)
return this.equals((NotificationEvent)that);
return false;
}
public boolean equals(NotificationEvent that) {
if (that == null)
return false;
if (this == that)
return true;
boolean this_present_eventId = true;
boolean that_present_eventId = true;
if (this_present_eventId || that_present_eventId) {
if (!(this_present_eventId && that_present_eventId))
return false;
if (this.eventId != that.eventId)
return false;
}
boolean this_present_eventTime = true;
boolean that_present_eventTime = true;
if (this_present_eventTime || that_present_eventTime) {
if (!(this_present_eventTime && that_present_eventTime))
return false;
if (this.eventTime != that.eventTime)
return false;
}
boolean this_present_eventType = true && this.isSetEventType();
boolean that_present_eventType = true && that.isSetEventType();
if (this_present_eventType || that_present_eventType) {
if (!(this_present_eventType && that_present_eventType))
return false;
if (!this.eventType.equals(that.eventType))
return false;
}
boolean this_present_dbName = true && this.isSetDbName();
boolean that_present_dbName = true && that.isSetDbName();
if (this_present_dbName || that_present_dbName) {
if (!(this_present_dbName && that_present_dbName))
return false;
if (!this.dbName.equals(that.dbName))
return false;
}
boolean this_present_tableName = true && this.isSetTableName();
boolean that_present_tableName = true && that.isSetTableName();
if (this_present_tableName || that_present_tableName) {
if (!(this_present_tableName && that_present_tableName))
return false;
if (!this.tableName.equals(that.tableName))
return false;
}
boolean this_present_message = true && this.isSetMessage();
boolean that_present_message = true && that.isSetMessage();
if (this_present_message || that_present_message) {
if (!(this_present_message && that_present_message))
return false;
if (!this.message.equals(that.message))
return false;
}
boolean this_present_messageFormat = true && this.isSetMessageFormat();
boolean that_present_messageFormat = true && that.isSetMessageFormat();
if (this_present_messageFormat || that_present_messageFormat) {
if (!(this_present_messageFormat && that_present_messageFormat))
return false;
if (!this.messageFormat.equals(that.messageFormat))
return false;
}
boolean this_present_catName = true && this.isSetCatName();
boolean that_present_catName = true && that.isSetCatName();
if (this_present_catName || that_present_catName) {
if (!(this_present_catName && that_present_catName))
return false;
if (!this.catName.equals(that.catName))
return false;
}
return true;
}
@Override
public int hashCode() {
int hashCode = 1;
hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(eventId);
hashCode = hashCode * 8191 + eventTime;
hashCode = hashCode * 8191 + ((isSetEventType()) ? 131071 : 524287);
if (isSetEventType())
hashCode = hashCode * 8191 + eventType.hashCode();
hashCode = hashCode * 8191 + ((isSetDbName()) ? 131071 : 524287);
if (isSetDbName())
hashCode = hashCode * 8191 + dbName.hashCode();
hashCode = hashCode * 8191 + ((isSetTableName()) ? 131071 : 524287);
if (isSetTableName())
hashCode = hashCode * 8191 + tableName.hashCode();
hashCode = hashCode * 8191 + ((isSetMessage()) ? 131071 : 524287);
if (isSetMessage())
hashCode = hashCode * 8191 + message.hashCode();
hashCode = hashCode * 8191 + ((isSetMessageFormat()) ? 131071 : 524287);
if (isSetMessageFormat())
hashCode = hashCode * 8191 + messageFormat.hashCode();
hashCode = hashCode * 8191 + ((isSetCatName()) ? 131071 : 524287);
if (isSetCatName())
hashCode = hashCode * 8191 + catName.hashCode();
return hashCode;
}
@Override
public int compareTo(NotificationEvent other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = java.lang.Boolean.compare(isSetEventId(), other.isSetEventId());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEventId()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eventId, other.eventId);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetEventTime(), other.isSetEventTime());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEventTime()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eventTime, other.eventTime);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetEventType(), other.isSetEventType());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetEventType()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.eventType, other.eventType);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetDbName(), other.isSetDbName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetDbName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbName, other.dbName);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetTableName(), other.isSetTableName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetTableName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tableName, other.tableName);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetMessage(), other.isSetMessage());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMessage()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.message, other.message);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetMessageFormat(), other.isSetMessageFormat());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetMessageFormat()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.messageFormat, other.messageFormat);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = java.lang.Boolean.compare(isSetCatName(), other.isSetCatName());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetCatName()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.catName, other.catName);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
@org.apache.thrift.annotation.Nullable
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
scheme(iprot).read(iprot, this);
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
scheme(oprot).write(oprot, this);
}
@Override
public java.lang.String toString() {
java.lang.StringBuilder sb = new java.lang.StringBuilder("NotificationEvent(");
boolean first = true;
sb.append("eventId:");
sb.append(this.eventId);
first = false;
if (!first) sb.append(", ");
sb.append("eventTime:");
sb.append(this.eventTime);
first = false;
if (!first) sb.append(", ");
sb.append("eventType:");
if (this.eventType == null) {
sb.append("null");
} else {
sb.append(this.eventType);
}
first = false;
if (isSetDbName()) {
if (!first) sb.append(", ");
sb.append("dbName:");
if (this.dbName == null) {
sb.append("null");
} else {
sb.append(this.dbName);
}
first = false;
}
if (isSetTableName()) {
if (!first) sb.append(", ");
sb.append("tableName:");
if (this.tableName == null) {
sb.append("null");
} else {
sb.append(this.tableName);
}
first = false;
}
if (!first) sb.append(", ");
sb.append("message:");
if (this.message == null) {
sb.append("null");
} else {
sb.append(this.message);
}
first = false;
if (isSetMessageFormat()) {
if (!first) sb.append(", ");
sb.append("messageFormat:");
if (this.messageFormat == null) {
sb.append("null");
} else {
sb.append(this.messageFormat);
}
first = false;
}
if (isSetCatName()) {
if (!first) sb.append(", ");
sb.append("catName:");
if (this.catName == null) {
sb.append("null");
} else {
sb.append(this.catName);
}
first = false;
}
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if (!isSetEventId()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'eventId' is unset! Struct:" + toString());
}
if (!isSetEventTime()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'eventTime' is unset! Struct:" + toString());
}
if (!isSetEventType()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'eventType' is unset! Struct:" + toString());
}
if (!isSetMessage()) {
throw new org.apache.thrift.protocol.TProtocolException("Required field 'message' is unset! Struct:" + toString());
}
// check for sub-struct validity
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
try {
// it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
__isset_bitfield = 0;
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private static class NotificationEventStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public NotificationEventStandardScheme getScheme() {
return new NotificationEventStandardScheme();
}
}
private static class NotificationEventStandardScheme extends org.apache.thrift.scheme.StandardScheme<NotificationEvent> {
public void read(org.apache.thrift.protocol.TProtocol iprot, NotificationEvent struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true)
{
schemeField = iprot.readFieldBegin();
if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // EVENT_ID
if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
struct.eventId = iprot.readI64();
struct.setEventIdIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 2: // EVENT_TIME
if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.eventTime = iprot.readI32();
struct.setEventTimeIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 3: // EVENT_TYPE
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.eventType = iprot.readString();
struct.setEventTypeIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 4: // DB_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.dbName = iprot.readString();
struct.setDbNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 5: // TABLE_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.tableName = iprot.readString();
struct.setTableNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 6: // MESSAGE
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.message = iprot.readString();
struct.setMessageIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 7: // MESSAGE_FORMAT
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.messageFormat = iprot.readString();
struct.setMessageFormatIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
case 8: // CAT_NAME
if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
struct.catName = iprot.readString();
struct.setCatNameIsSet(true);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot, NotificationEvent struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
oprot.writeFieldBegin(EVENT_ID_FIELD_DESC);
oprot.writeI64(struct.eventId);
oprot.writeFieldEnd();
oprot.writeFieldBegin(EVENT_TIME_FIELD_DESC);
oprot.writeI32(struct.eventTime);
oprot.writeFieldEnd();
if (struct.eventType != null) {
oprot.writeFieldBegin(EVENT_TYPE_FIELD_DESC);
oprot.writeString(struct.eventType);
oprot.writeFieldEnd();
}
if (struct.dbName != null) {
if (struct.isSetDbName()) {
oprot.writeFieldBegin(DB_NAME_FIELD_DESC);
oprot.writeString(struct.dbName);
oprot.writeFieldEnd();
}
}
if (struct.tableName != null) {
if (struct.isSetTableName()) {
oprot.writeFieldBegin(TABLE_NAME_FIELD_DESC);
oprot.writeString(struct.tableName);
oprot.writeFieldEnd();
}
}
if (struct.message != null) {
oprot.writeFieldBegin(MESSAGE_FIELD_DESC);
oprot.writeString(struct.message);
oprot.writeFieldEnd();
}
if (struct.messageFormat != null) {
if (struct.isSetMessageFormat()) {
oprot.writeFieldBegin(MESSAGE_FORMAT_FIELD_DESC);
oprot.writeString(struct.messageFormat);
oprot.writeFieldEnd();
}
}
if (struct.catName != null) {
if (struct.isSetCatName()) {
oprot.writeFieldBegin(CAT_NAME_FIELD_DESC);
oprot.writeString(struct.catName);
oprot.writeFieldEnd();
}
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class NotificationEventTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
public NotificationEventTupleScheme getScheme() {
return new NotificationEventTupleScheme();
}
}
private static class NotificationEventTupleScheme extends org.apache.thrift.scheme.TupleScheme<NotificationEvent> {
@Override
public void write(org.apache.thrift.protocol.TProtocol prot, NotificationEvent struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
oprot.writeI64(struct.eventId);
oprot.writeI32(struct.eventTime);
oprot.writeString(struct.eventType);
oprot.writeString(struct.message);
java.util.BitSet optionals = new java.util.BitSet();
if (struct.isSetDbName()) {
optionals.set(0);
}
if (struct.isSetTableName()) {
optionals.set(1);
}
if (struct.isSetMessageFormat()) {
optionals.set(2);
}
if (struct.isSetCatName()) {
optionals.set(3);
}
oprot.writeBitSet(optionals, 4);
if (struct.isSetDbName()) {
oprot.writeString(struct.dbName);
}
if (struct.isSetTableName()) {
oprot.writeString(struct.tableName);
}
if (struct.isSetMessageFormat()) {
oprot.writeString(struct.messageFormat);
}
if (struct.isSetCatName()) {
oprot.writeString(struct.catName);
}
}
@Override
public void read(org.apache.thrift.protocol.TProtocol prot, NotificationEvent struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
struct.eventId = iprot.readI64();
struct.setEventIdIsSet(true);
struct.eventTime = iprot.readI32();
struct.setEventTimeIsSet(true);
struct.eventType = iprot.readString();
struct.setEventTypeIsSet(true);
struct.message = iprot.readString();
struct.setMessageIsSet(true);
java.util.BitSet incoming = iprot.readBitSet(4);
if (incoming.get(0)) {
struct.dbName = iprot.readString();
struct.setDbNameIsSet(true);
}
if (incoming.get(1)) {
struct.tableName = iprot.readString();
struct.setTableNameIsSet(true);
}
if (incoming.get(2)) {
struct.messageFormat = iprot.readString();
struct.setMessageFormatIsSet(true);
}
if (incoming.get(3)) {
struct.catName = iprot.readString();
struct.setCatNameIsSet(true);
}
}
}
/**
 * Picks the scheme implementation (standard vs. tuple) matching the protocol
 * in use, via the corresponding generated factory.
 */
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
  final org.apache.thrift.scheme.SchemeFactory factory;
  if (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme())) {
    factory = STANDARD_SCHEME_FACTORY;
  } else {
    factory = TUPLE_SCHEME_FACTORY;
  }
  return factory.getScheme();
}
}
|
googleapis/google-cloud-java | 36,941 | java-speech/proto-google-cloud-speech-v1/src/main/java/com/google/cloud/speech/v1/StreamingRecognizeRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v1/cloud_speech.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.speech.v1;
/**
 * The top-level message sent by the client for the `StreamingRecognize` method.
 * Multiple `StreamingRecognizeRequest` messages are sent. The first message
 * must contain a `streaming_config` message and must not contain
 * `audio_content`. All subsequent messages must contain `audio_content` and
 * must not contain a `streaming_config` message.
 *
 * <p>NOTE(review): protoc-generated class (file header says DO NOT EDIT).
 * Comments added here are review annotations only; regenerating from
 * cloud_speech.proto will discard them.
 *
 * Protobuf type {@code google.cloud.speech.v1.StreamingRecognizeRequest}
 */
public final class StreamingRecognizeRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.speech.v1.StreamingRecognizeRequest)
    StreamingRecognizeRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use StreamingRecognizeRequest.newBuilder() to construct.
  private StreamingRecognizeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private StreamingRecognizeRequest() {}

  // Called reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new StreamingRecognizeRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.speech.v1.SpeechProto
        .internal_static_google_cloud_speech_v1_StreamingRecognizeRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.speech.v1.SpeechProto
        .internal_static_google_cloud_speech_v1_StreamingRecognizeRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.speech.v1.StreamingRecognizeRequest.class,
            com.google.cloud.speech.v1.StreamingRecognizeRequest.Builder.class);
  }

  // Discriminator for the `streaming_request` oneof: 0 = unset, otherwise the
  // field number of the populated member (1 = streaming_config, 2 = audio_content).
  private int streamingRequestCase_ = 0;

  // Holds either a StreamingRecognitionConfig (case 1) or a ByteString (case 2),
  // per streamingRequestCase_.
  @SuppressWarnings("serial")
  private java.lang.Object streamingRequest_;

  /** Enum view of the `streaming_request` oneof discriminator. */
  public enum StreamingRequestCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    STREAMING_CONFIG(1),
    AUDIO_CONTENT(2),
    STREAMINGREQUEST_NOT_SET(0);
    private final int value;

    private StreamingRequestCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static StreamingRequestCase valueOf(int value) {
      return forNumber(value);
    }

    /** Maps a oneof field number to its constant; returns null for unknown numbers. */
    public static StreamingRequestCase forNumber(int value) {
      switch (value) {
        case 1:
          return STREAMING_CONFIG;
        case 2:
          return AUDIO_CONTENT;
        case 0:
          return STREAMINGREQUEST_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };

  public StreamingRequestCase getStreamingRequestCase() {
    return StreamingRequestCase.forNumber(streamingRequestCase_);
  }

  public static final int STREAMING_CONFIG_FIELD_NUMBER = 1;

  /**
   * Provides information to the recognizer that specifies how to process the
   * request. The first `StreamingRecognizeRequest` message must contain a
   * `streaming_config` message.
   *
   * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
   *
   * @return Whether the streamingConfig field is set.
   */
  @java.lang.Override
  public boolean hasStreamingConfig() {
    return streamingRequestCase_ == 1;
  }

  /**
   * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
   *
   * @return The streamingConfig, or the default instance if this oneof case is not set.
   */
  @java.lang.Override
  public com.google.cloud.speech.v1.StreamingRecognitionConfig getStreamingConfig() {
    if (streamingRequestCase_ == 1) {
      return (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_;
    }
    return com.google.cloud.speech.v1.StreamingRecognitionConfig.getDefaultInstance();
  }

  /**
   * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.speech.v1.StreamingRecognitionConfigOrBuilder
      getStreamingConfigOrBuilder() {
    if (streamingRequestCase_ == 1) {
      return (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_;
    }
    return com.google.cloud.speech.v1.StreamingRecognitionConfig.getDefaultInstance();
  }

  public static final int AUDIO_CONTENT_FIELD_NUMBER = 2;

  /**
   * The audio data to be recognized. Sequential chunks of audio data are sent
   * in sequential `StreamingRecognizeRequest` messages. The first
   * `StreamingRecognizeRequest` message must not contain `audio_content` data
   * and all subsequent `StreamingRecognizeRequest` messages must contain
   * `audio_content` data. The audio bytes must be encoded as specified in
   * `RecognitionConfig`. Note: as with all bytes fields, proto buffers use a
   * pure binary representation (not base64). See
   * [content limits](https://cloud.google.com/speech-to-text/quotas#content).
   *
   * <code>bytes audio_content = 2;</code>
   *
   * @return Whether the audioContent field is set.
   */
  @java.lang.Override
  public boolean hasAudioContent() {
    return streamingRequestCase_ == 2;
  }

  /**
   * <code>bytes audio_content = 2;</code>
   *
   * @return The audioContent, or {@code ByteString.EMPTY} if this oneof case is not set.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getAudioContent() {
    if (streamingRequestCase_ == 2) {
      return (com.google.protobuf.ByteString) streamingRequest_;
    }
    return com.google.protobuf.ByteString.EMPTY;
  }

  // Memoized required-field check: -1 = unknown, 0 = false, 1 = true. This
  // proto has no required fields, so the result is always true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes whichever oneof member is set, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (streamingRequestCase_ == 1) {
      output.writeMessage(
          1, (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_);
    }
    if (streamingRequestCase_ == 2) {
      output.writeBytes(2, (com.google.protobuf.ByteString) streamingRequest_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;  // cached from a previous call
    size = 0;
    if (streamingRequestCase_ == 1) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_);
    }
    if (streamingRequestCase_ == 2) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBytesSize(
              2, (com.google.protobuf.ByteString) streamingRequest_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality: same oneof case, equal member value, equal unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.speech.v1.StreamingRecognizeRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.speech.v1.StreamingRecognizeRequest other =
        (com.google.cloud.speech.v1.StreamingRecognizeRequest) obj;
    if (!getStreamingRequestCase().equals(other.getStreamingRequestCase())) return false;
    switch (streamingRequestCase_) {
      case 1:
        if (!getStreamingConfig().equals(other.getStreamingConfig())) return false;
        break;
      case 2:
        if (!getAudioContent().equals(other.getAudioContent())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash mixes the descriptor, the set oneof member (keyed by field number),
  // and unknown fields, using protoc's standard prime multipliers; memoized.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    switch (streamingRequestCase_) {
      case 1:
        hash = (37 * hash) + STREAMING_CONFIG_FIELD_NUMBER;
        hash = (53 * hash) + getStreamingConfig().hashCode();
        break;
      case 2:
        hash = (37 * hash) + AUDIO_CONTENT_FIELD_NUMBER;
        hash = (53 * hash) + getAudioContent().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // --- Standard protoc parse entry points (delegate to PARSER). ---

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.speech.v1.StreamingRecognizeRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * The top-level message sent by the client for the `StreamingRecognize` method.
   * Multiple `StreamingRecognizeRequest` messages are sent. The first message
   * must contain a `streaming_config` message and must not contain
   * `audio_content`. All subsequent messages must contain `audio_content` and
   * must not contain a `streaming_config` message.
   *
   * Protobuf type {@code google.cloud.speech.v1.StreamingRecognizeRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.speech.v1.StreamingRecognizeRequest)
      com.google.cloud.speech.v1.StreamingRecognizeRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.speech.v1.SpeechProto
          .internal_static_google_cloud_speech_v1_StreamingRecognizeRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.speech.v1.SpeechProto
          .internal_static_google_cloud_speech_v1_StreamingRecognizeRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.speech.v1.StreamingRecognizeRequest.class,
              com.google.cloud.speech.v1.StreamingRecognizeRequest.Builder.class);
    }

    // Construct using com.google.cloud.speech.v1.StreamingRecognizeRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets the oneof state and any sub-builder back to "unset".
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (streamingConfigBuilder_ != null) {
        streamingConfigBuilder_.clear();
      }
      streamingRequestCase_ = 0;
      streamingRequest_ = null;
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.speech.v1.SpeechProto
          .internal_static_google_cloud_speech_v1_StreamingRecognizeRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.speech.v1.StreamingRecognizeRequest getDefaultInstanceForType() {
      return com.google.cloud.speech.v1.StreamingRecognizeRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.speech.v1.StreamingRecognizeRequest build() {
      com.google.cloud.speech.v1.StreamingRecognizeRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.speech.v1.StreamingRecognizeRequest buildPartial() {
      com.google.cloud.speech.v1.StreamingRecognizeRequest result =
          new com.google.cloud.speech.v1.StreamingRecognizeRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }

    // No singular (non-oneof) fields in this message, hence the empty body;
    // the unused local is a generator artifact.
    private void buildPartial0(com.google.cloud.speech.v1.StreamingRecognizeRequest result) {
      int from_bitField0_ = bitField0_;
    }

    // Copies the oneof case/value into the result; if a sub-builder is live
    // for streaming_config, its built message wins over the raw field.
    private void buildPartialOneofs(com.google.cloud.speech.v1.StreamingRecognizeRequest result) {
      result.streamingRequestCase_ = streamingRequestCase_;
      result.streamingRequest_ = this.streamingRequest_;
      if (streamingRequestCase_ == 1 && streamingConfigBuilder_ != null) {
        result.streamingRequest_ = streamingConfigBuilder_.build();
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.speech.v1.StreamingRecognizeRequest) {
        return mergeFrom((com.google.cloud.speech.v1.StreamingRecognizeRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges by oneof case: messages are deep-merged, bytes are replaced.
    public Builder mergeFrom(com.google.cloud.speech.v1.StreamingRecognizeRequest other) {
      if (other == com.google.cloud.speech.v1.StreamingRecognizeRequest.getDefaultInstance())
        return this;
      switch (other.getStreamingRequestCase()) {
        case STREAMING_CONFIG:
          {
            mergeStreamingConfig(other.getStreamingConfig());
            break;
          }
        case AUDIO_CONTENT:
          {
            setAudioContent(other.getAudioContent());
            break;
          }
        case STREAMINGREQUEST_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: tag 10 = field 1 (length-delimited message),
    // tag 18 = field 2 (length-delimited bytes); anything else goes to the
    // unknown-field set. onChanged() fires even on error (finally).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getStreamingConfigFieldBuilder().getBuilder(), extensionRegistry);
                streamingRequestCase_ = 1;
                break;
              } // case 10
            case 18:
              {
                streamingRequest_ = input.readBytes();
                streamingRequestCase_ = 2;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder-side mirror of the oneof state (see the message fields above).
    private int streamingRequestCase_ = 0;
    private java.lang.Object streamingRequest_;

    public StreamingRequestCase getStreamingRequestCase() {
      return StreamingRequestCase.forNumber(streamingRequestCase_);
    }

    public Builder clearStreamingRequest() {
      streamingRequestCase_ = 0;
      streamingRequest_ = null;
      onChanged();
      return this;
    }

    private int bitField0_;

    // Lazily-created sub-builder for the streaming_config member; while live,
    // it (not streamingRequest_) holds the authoritative value for case 1.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.speech.v1.StreamingRecognitionConfig,
            com.google.cloud.speech.v1.StreamingRecognitionConfig.Builder,
            com.google.cloud.speech.v1.StreamingRecognitionConfigOrBuilder>
        streamingConfigBuilder_;

    /**
     * Provides information to the recognizer that specifies how to process the
     * request. The first `StreamingRecognizeRequest` message must contain a
     * `streaming_config` message.
     *
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     *
     * @return Whether the streamingConfig field is set.
     */
    @java.lang.Override
    public boolean hasStreamingConfig() {
      return streamingRequestCase_ == 1;
    }

    /**
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     *
     * @return The streamingConfig.
     */
    @java.lang.Override
    public com.google.cloud.speech.v1.StreamingRecognitionConfig getStreamingConfig() {
      if (streamingConfigBuilder_ == null) {
        if (streamingRequestCase_ == 1) {
          return (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_;
        }
        return com.google.cloud.speech.v1.StreamingRecognitionConfig.getDefaultInstance();
      } else {
        if (streamingRequestCase_ == 1) {
          return streamingConfigBuilder_.getMessage();
        }
        return com.google.cloud.speech.v1.StreamingRecognitionConfig.getDefaultInstance();
      }
    }

    /**
     * Sets the streaming_config oneof member (rejects null).
     *
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     */
    public Builder setStreamingConfig(com.google.cloud.speech.v1.StreamingRecognitionConfig value) {
      if (streamingConfigBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        streamingRequest_ = value;
        onChanged();
      } else {
        streamingConfigBuilder_.setMessage(value);
      }
      streamingRequestCase_ = 1;
      return this;
    }

    /**
     * Sets the streaming_config oneof member from a builder.
     *
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     */
    public Builder setStreamingConfig(
        com.google.cloud.speech.v1.StreamingRecognitionConfig.Builder builderForValue) {
      if (streamingConfigBuilder_ == null) {
        streamingRequest_ = builderForValue.build();
        onChanged();
      } else {
        streamingConfigBuilder_.setMessage(builderForValue.build());
      }
      streamingRequestCase_ = 1;
      return this;
    }

    /**
     * Merges into the streaming_config oneof member; if the case was not
     * already streaming_config (or held the default), the value replaces it.
     *
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     */
    public Builder mergeStreamingConfig(
        com.google.cloud.speech.v1.StreamingRecognitionConfig value) {
      if (streamingConfigBuilder_ == null) {
        if (streamingRequestCase_ == 1
            && streamingRequest_
                != com.google.cloud.speech.v1.StreamingRecognitionConfig.getDefaultInstance()) {
          streamingRequest_ =
              com.google.cloud.speech.v1.StreamingRecognitionConfig.newBuilder(
                      (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          streamingRequest_ = value;
        }
        onChanged();
      } else {
        if (streamingRequestCase_ == 1) {
          streamingConfigBuilder_.mergeFrom(value);
        } else {
          streamingConfigBuilder_.setMessage(value);
        }
      }
      streamingRequestCase_ = 1;
      return this;
    }

    /**
     * Clears streaming_config (only if it is the currently-set oneof case).
     *
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     */
    public Builder clearStreamingConfig() {
      if (streamingConfigBuilder_ == null) {
        if (streamingRequestCase_ == 1) {
          streamingRequestCase_ = 0;
          streamingRequest_ = null;
          onChanged();
        }
      } else {
        if (streamingRequestCase_ == 1) {
          streamingRequestCase_ = 0;
          streamingRequest_ = null;
        }
        streamingConfigBuilder_.clear();
      }
      return this;
    }

    /**
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     */
    public com.google.cloud.speech.v1.StreamingRecognitionConfig.Builder
        getStreamingConfigBuilder() {
      return getStreamingConfigFieldBuilder().getBuilder();
    }

    /**
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     */
    @java.lang.Override
    public com.google.cloud.speech.v1.StreamingRecognitionConfigOrBuilder
        getStreamingConfigOrBuilder() {
      if ((streamingRequestCase_ == 1) && (streamingConfigBuilder_ != null)) {
        return streamingConfigBuilder_.getMessageOrBuilder();
      } else {
        if (streamingRequestCase_ == 1) {
          return (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_;
        }
        return com.google.cloud.speech.v1.StreamingRecognitionConfig.getDefaultInstance();
      }
    }

    /**
     * Lazily creates the streaming_config sub-builder, seeding it with the
     * current value (or default) and switching the oneof case to 1.
     *
     * <code>.google.cloud.speech.v1.StreamingRecognitionConfig streaming_config = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.speech.v1.StreamingRecognitionConfig,
            com.google.cloud.speech.v1.StreamingRecognitionConfig.Builder,
            com.google.cloud.speech.v1.StreamingRecognitionConfigOrBuilder>
        getStreamingConfigFieldBuilder() {
      if (streamingConfigBuilder_ == null) {
        if (!(streamingRequestCase_ == 1)) {
          streamingRequest_ =
              com.google.cloud.speech.v1.StreamingRecognitionConfig.getDefaultInstance();
        }
        streamingConfigBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.speech.v1.StreamingRecognitionConfig,
                com.google.cloud.speech.v1.StreamingRecognitionConfig.Builder,
                com.google.cloud.speech.v1.StreamingRecognitionConfigOrBuilder>(
                (com.google.cloud.speech.v1.StreamingRecognitionConfig) streamingRequest_,
                getParentForChildren(),
                isClean());
        streamingRequest_ = null;
      }
      streamingRequestCase_ = 1;
      onChanged();
      return streamingConfigBuilder_;
    }

    /**
     * The audio data to be recognized. Sequential chunks of audio data are sent
     * in sequential `StreamingRecognizeRequest` messages. The first
     * `StreamingRecognizeRequest` message must not contain `audio_content` data
     * and all subsequent `StreamingRecognizeRequest` messages must contain
     * `audio_content` data. The audio bytes must be encoded as specified in
     * `RecognitionConfig`. Note: as with all bytes fields, proto buffers use a
     * pure binary representation (not base64). See
     * [content limits](https://cloud.google.com/speech-to-text/quotas#content).
     *
     * <code>bytes audio_content = 2;</code>
     *
     * @return Whether the audioContent field is set.
     */
    public boolean hasAudioContent() {
      return streamingRequestCase_ == 2;
    }

    /**
     * <code>bytes audio_content = 2;</code>
     *
     * @return The audioContent.
     */
    public com.google.protobuf.ByteString getAudioContent() {
      if (streamingRequestCase_ == 2) {
        return (com.google.protobuf.ByteString) streamingRequest_;
      }
      return com.google.protobuf.ByteString.EMPTY;
    }

    /**
     * Sets the audio_content oneof member (rejects null).
     *
     * <code>bytes audio_content = 2;</code>
     *
     * @param value The audioContent to set.
     * @return This builder for chaining.
     */
    public Builder setAudioContent(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      streamingRequestCase_ = 2;
      streamingRequest_ = value;
      onChanged();
      return this;
    }

    /**
     * Clears audio_content (only if it is the currently-set oneof case).
     *
     * <code>bytes audio_content = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearAudioContent() {
      if (streamingRequestCase_ == 2) {
        streamingRequestCase_ = 0;
        streamingRequest_ = null;
        onChanged();
      }
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.speech.v1.StreamingRecognizeRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.speech.v1.StreamingRecognizeRequest)
  private static final com.google.cloud.speech.v1.StreamingRecognizeRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.speech.v1.StreamingRecognizeRequest();
  }

  public static com.google.cloud.speech.v1.StreamingRecognizeRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to Builder.mergeFrom and always returns a partial
  // message on failure (attached to the thrown exception).
  private static final com.google.protobuf.Parser<StreamingRecognizeRequest> PARSER =
      new com.google.protobuf.AbstractParser<StreamingRecognizeRequest>() {
        @java.lang.Override
        public StreamingRecognizeRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<StreamingRecognizeRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<StreamingRecognizeRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.speech.v1.StreamingRecognizeRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,825 | java-dataform/proto-google-cloud-dataform-v1/src/main/java/com/google/cloud/dataform/v1/SearchFilesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1;
/**
*
*
* <pre>
* Client-facing representation of a file search response.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.SearchFilesResponse}
*/
public final class SearchFilesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1.SearchFilesResponse)
SearchFilesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchFilesResponse.newBuilder() to construct.
private SearchFilesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchFilesResponse() {
searchResults_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SearchFilesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_SearchFilesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_SearchFilesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1.SearchFilesResponse.class,
com.google.cloud.dataform.v1.SearchFilesResponse.Builder.class);
}
public static final int SEARCH_RESULTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dataform.v1.SearchResult> searchResults_;
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dataform.v1.SearchResult> getSearchResultsList() {
return searchResults_;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dataform.v1.SearchResultOrBuilder>
getSearchResultsOrBuilderList() {
return searchResults_;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
@java.lang.Override
public int getSearchResultsCount() {
return searchResults_.size();
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1.SearchResult getSearchResults(int index) {
return searchResults_.get(index);
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1.SearchResultOrBuilder getSearchResultsOrBuilder(int index) {
return searchResults_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Optional. A token, which can be sent as `page_token` to retrieve the next
* page. If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A token, which can be sent as `page_token` to retrieve the next
* page. If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < searchResults_.size(); i++) {
output.writeMessage(1, searchResults_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < searchResults_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, searchResults_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataform.v1.SearchFilesResponse)) {
return super.equals(obj);
}
com.google.cloud.dataform.v1.SearchFilesResponse other =
(com.google.cloud.dataform.v1.SearchFilesResponse) obj;
if (!getSearchResultsList().equals(other.getSearchResultsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getSearchResultsCount() > 0) {
hash = (37 * hash) + SEARCH_RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getSearchResultsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.SearchFilesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.dataform.v1.SearchFilesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Client-facing representation of a file search response.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.SearchFilesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1.SearchFilesResponse)
com.google.cloud.dataform.v1.SearchFilesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_SearchFilesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_SearchFilesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1.SearchFilesResponse.class,
com.google.cloud.dataform.v1.SearchFilesResponse.Builder.class);
}
// Construct using com.google.cloud.dataform.v1.SearchFilesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (searchResultsBuilder_ == null) {
searchResults_ = java.util.Collections.emptyList();
} else {
searchResults_ = null;
searchResultsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_SearchFilesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dataform.v1.SearchFilesResponse getDefaultInstanceForType() {
return com.google.cloud.dataform.v1.SearchFilesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataform.v1.SearchFilesResponse build() {
com.google.cloud.dataform.v1.SearchFilesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataform.v1.SearchFilesResponse buildPartial() {
com.google.cloud.dataform.v1.SearchFilesResponse result =
new com.google.cloud.dataform.v1.SearchFilesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.dataform.v1.SearchFilesResponse result) {
if (searchResultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
searchResults_ = java.util.Collections.unmodifiableList(searchResults_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.searchResults_ = searchResults_;
} else {
result.searchResults_ = searchResultsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.dataform.v1.SearchFilesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataform.v1.SearchFilesResponse) {
return mergeFrom((com.google.cloud.dataform.v1.SearchFilesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataform.v1.SearchFilesResponse other) {
if (other == com.google.cloud.dataform.v1.SearchFilesResponse.getDefaultInstance())
return this;
if (searchResultsBuilder_ == null) {
if (!other.searchResults_.isEmpty()) {
if (searchResults_.isEmpty()) {
searchResults_ = other.searchResults_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureSearchResultsIsMutable();
searchResults_.addAll(other.searchResults_);
}
onChanged();
}
} else {
if (!other.searchResults_.isEmpty()) {
if (searchResultsBuilder_.isEmpty()) {
searchResultsBuilder_.dispose();
searchResultsBuilder_ = null;
searchResults_ = other.searchResults_;
bitField0_ = (bitField0_ & ~0x00000001);
searchResultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSearchResultsFieldBuilder()
: null;
} else {
searchResultsBuilder_.addAllMessages(other.searchResults_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dataform.v1.SearchResult m =
input.readMessage(
com.google.cloud.dataform.v1.SearchResult.parser(), extensionRegistry);
if (searchResultsBuilder_ == null) {
ensureSearchResultsIsMutable();
searchResults_.add(m);
} else {
searchResultsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.dataform.v1.SearchResult> searchResults_ =
java.util.Collections.emptyList();
private void ensureSearchResultsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
searchResults_ =
new java.util.ArrayList<com.google.cloud.dataform.v1.SearchResult>(searchResults_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dataform.v1.SearchResult,
com.google.cloud.dataform.v1.SearchResult.Builder,
com.google.cloud.dataform.v1.SearchResultOrBuilder>
searchResultsBuilder_;
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public java.util.List<com.google.cloud.dataform.v1.SearchResult> getSearchResultsList() {
if (searchResultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(searchResults_);
} else {
return searchResultsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public int getSearchResultsCount() {
if (searchResultsBuilder_ == null) {
return searchResults_.size();
} else {
return searchResultsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public com.google.cloud.dataform.v1.SearchResult getSearchResults(int index) {
if (searchResultsBuilder_ == null) {
return searchResults_.get(index);
} else {
return searchResultsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder setSearchResults(int index, com.google.cloud.dataform.v1.SearchResult value) {
if (searchResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSearchResultsIsMutable();
searchResults_.set(index, value);
onChanged();
} else {
searchResultsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder setSearchResults(
int index, com.google.cloud.dataform.v1.SearchResult.Builder builderForValue) {
if (searchResultsBuilder_ == null) {
ensureSearchResultsIsMutable();
searchResults_.set(index, builderForValue.build());
onChanged();
} else {
searchResultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder addSearchResults(com.google.cloud.dataform.v1.SearchResult value) {
if (searchResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSearchResultsIsMutable();
searchResults_.add(value);
onChanged();
} else {
searchResultsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder addSearchResults(int index, com.google.cloud.dataform.v1.SearchResult value) {
if (searchResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSearchResultsIsMutable();
searchResults_.add(index, value);
onChanged();
} else {
searchResultsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder addSearchResults(
com.google.cloud.dataform.v1.SearchResult.Builder builderForValue) {
if (searchResultsBuilder_ == null) {
ensureSearchResultsIsMutable();
searchResults_.add(builderForValue.build());
onChanged();
} else {
searchResultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder addSearchResults(
int index, com.google.cloud.dataform.v1.SearchResult.Builder builderForValue) {
if (searchResultsBuilder_ == null) {
ensureSearchResultsIsMutable();
searchResults_.add(index, builderForValue.build());
onChanged();
} else {
searchResultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder addAllSearchResults(
java.lang.Iterable<? extends com.google.cloud.dataform.v1.SearchResult> values) {
if (searchResultsBuilder_ == null) {
ensureSearchResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, searchResults_);
onChanged();
} else {
searchResultsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder clearSearchResults() {
if (searchResultsBuilder_ == null) {
searchResults_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
searchResultsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public Builder removeSearchResults(int index) {
if (searchResultsBuilder_ == null) {
ensureSearchResultsIsMutable();
searchResults_.remove(index);
onChanged();
} else {
searchResultsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public com.google.cloud.dataform.v1.SearchResult.Builder getSearchResultsBuilder(int index) {
return getSearchResultsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public com.google.cloud.dataform.v1.SearchResultOrBuilder getSearchResultsOrBuilder(int index) {
if (searchResultsBuilder_ == null) {
return searchResults_.get(index);
} else {
return searchResultsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dataform.v1.SearchResultOrBuilder>
getSearchResultsOrBuilderList() {
if (searchResultsBuilder_ != null) {
return searchResultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(searchResults_);
}
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public com.google.cloud.dataform.v1.SearchResult.Builder addSearchResultsBuilder() {
return getSearchResultsFieldBuilder()
.addBuilder(com.google.cloud.dataform.v1.SearchResult.getDefaultInstance());
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public com.google.cloud.dataform.v1.SearchResult.Builder addSearchResultsBuilder(int index) {
return getSearchResultsFieldBuilder()
.addBuilder(index, com.google.cloud.dataform.v1.SearchResult.getDefaultInstance());
}
/**
*
*
* <pre>
* List of matched results.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.SearchResult search_results = 1;</code>
*/
public java.util.List<com.google.cloud.dataform.v1.SearchResult.Builder>
getSearchResultsBuilderList() {
return getSearchResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dataform.v1.SearchResult,
com.google.cloud.dataform.v1.SearchResult.Builder,
com.google.cloud.dataform.v1.SearchResultOrBuilder>
getSearchResultsFieldBuilder() {
if (searchResultsBuilder_ == null) {
searchResultsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dataform.v1.SearchResult,
com.google.cloud.dataform.v1.SearchResult.Builder,
com.google.cloud.dataform.v1.SearchResultOrBuilder>(
searchResults_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
searchResults_ = null;
}
return searchResultsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Optional. A token, which can be sent as `page_token` to retrieve the next
* page. If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A token, which can be sent as `page_token` to retrieve the next
* page. If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A token, which can be sent as `page_token` to retrieve the next
* page. If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token, which can be sent as `page_token` to retrieve the next
* page. If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token, which can be sent as `page_token` to retrieve the next
* page. If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1.SearchFilesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1.SearchFilesResponse)
private static final com.google.cloud.dataform.v1.SearchFilesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dataform.v1.SearchFilesResponse();
}
public static com.google.cloud.dataform.v1.SearchFilesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SearchFilesResponse> PARSER =
new com.google.protobuf.AbstractParser<SearchFilesResponse>() {
@java.lang.Override
public SearchFilesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SearchFilesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SearchFilesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dataform.v1.SearchFilesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/systemds | 34,865 | src/main/java/org/apache/sysds/runtime/controlprogram/federated/FederatedStatistics.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.runtime.controlprogram.federated;
import java.io.Serializable;
import java.lang.management.ManagementFactory;
import java.net.InetSocketAddress;
import java.text.DecimalFormat;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.log4j.Logger;
import org.apache.sysds.api.DMLScript;
import org.apache.sysds.conf.ConfigurationManager;
import org.apache.sysds.runtime.DMLRuntimeException;
import org.apache.sysds.runtime.controlprogram.caching.CacheBlock;
import org.apache.sysds.runtime.controlprogram.caching.CacheStatistics;
import org.apache.sysds.runtime.controlprogram.caching.CacheableData;
import org.apache.sysds.runtime.controlprogram.caching.FrameObject;
import org.apache.sysds.runtime.controlprogram.caching.MatrixObject;
import org.apache.sysds.runtime.controlprogram.context.ExecutionContext;
import org.apache.sysds.runtime.controlprogram.federated.FederatedRequest.RequestType;
import org.apache.sysds.runtime.controlprogram.federated.FederatedStatistics.FedStatsCollection.CacheStatsCollection;
import org.apache.sysds.runtime.controlprogram.federated.FederatedStatistics.FedStatsCollection.GCStatsCollection;
import org.apache.sysds.runtime.controlprogram.federated.FederatedStatistics.FedStatsCollection.LineageCacheStatsCollection;
import org.apache.sysds.runtime.controlprogram.federated.FederatedStatistics.FedStatsCollection.MultiTenantStatsCollection;
import org.apache.sysds.runtime.controlprogram.federated.monitoring.models.DataObjectModel;
import org.apache.sysds.runtime.controlprogram.federated.monitoring.models.EventModel;
import org.apache.sysds.runtime.controlprogram.federated.monitoring.models.RequestModel;
import org.apache.sysds.runtime.controlprogram.federated.monitoring.models.TrafficModel;
import org.apache.sysds.runtime.controlprogram.federated.monitoring.models.UtilizationModel;
import org.apache.sysds.runtime.frame.data.FrameBlock;
import org.apache.sysds.runtime.instructions.InstructionUtils;
import org.apache.sysds.runtime.instructions.cp.Data;
import org.apache.sysds.runtime.instructions.cp.ListObject;
import org.apache.sysds.runtime.instructions.cp.ScalarObject;
import org.apache.sysds.runtime.lineage.LineageCacheStatistics;
import org.apache.sysds.runtime.lineage.LineageItem;
import org.apache.sysds.runtime.matrix.data.MatrixBlock;
import org.apache.sysds.runtime.meta.MatrixCharacteristics;
import org.apache.sysds.utils.Statistics;
public class FederatedStatistics {
	protected static Logger LOG = Logger.getLogger(FederatedStatistics.class);
	// stats of the federated worker on the coordinator site
	// (LongAdder counters are thread-safe and updated from request handlers)
	private static Set<Pair<String, Integer>> _fedWorkerAddresses = new HashSet<>();
	private static final LongAdder readCount = new LongAdder();
	private static final LongAdder putCount = new LongAdder();
	private static final LongAdder getCount = new LongAdder();
	private static final LongAdder executeInstructionCount = new LongAdder();
	private static final LongAdder executeUDFCount = new LongAdder();
	private static final LongAdder transferredScalarCount = new LongAdder();
	private static final LongAdder transferredListCount = new LongAdder();
	private static final LongAdder transferredMatrixCount = new LongAdder();
	private static final LongAdder transferredFrameCount = new LongAdder();
	private static final LongAdder transferredMatCharCount = new LongAdder();
	private static final LongAdder transferredMatrixBytes = new LongAdder();
	private static final LongAdder transferredFrameBytes = new LongAdder();
	private static final LongAdder asyncPrefetchCount = new LongAdder();
	private static final LongAdder bytesSent = new LongAdder();
	private static final LongAdder bytesReceived = new LongAdder();
	// stats on the federated worker itself
	private static final LongAdder fedLookupTableGetCount = new LongAdder();
	// NOTE(review): declared as msec, but displayFedLookupTableStats divides
	// by 1e9 (i.e., treats it as nanoseconds) -- confirm the intended unit
	private static final LongAdder fedLookupTableGetTime = new LongAdder(); // msec
	private static final LongAdder fedLookupTableEntryCount = new LongAdder();
	private static final LongAdder fedReuseReadHitCount = new LongAdder();
	private static final LongAdder fedReuseReadBytesCount = new LongAdder();
	private static final LongAdder fedBytesSent = new LongAdder();
	private static final LongAdder fedBytesReceived = new LongAdder();
	private static final LongAdder fedPutLineageCount = new LongAdder();
	private static final LongAdder fedPutLineageItems = new LongAdder();
	private static final LongAdder fedSerializationReuseCount = new LongAdder();
	private static final LongAdder fedSerializationReuseBytes = new LongAdder();
	// monitoring buffers; plain collections -- callers synchronize explicitly
	// (see addEvent) or not at all (NOTE(review): verify thread-safety of the
	// unsynchronized accesses to these collections)
	private static final List<TrafficModel> coordinatorsTrafficBytes = new ArrayList<>();
	private static final List<EventModel> workerEvents = new ArrayList<>();
	private static final Map<String, DataObjectModel> workerDataObjects = new HashMap<>();
	private static final Map<String, RequestModel> workerFederatedRequests = new HashMap<>();
	// Accumulates coordinator-side (server) network traffic in bytes.
	public static void logServerTraffic(long read, long written) {
		bytesReceived.add(read);
		bytesSent.add(written);
	}
	// Accumulates worker-side network traffic in bytes.
	public static void logWorkerTraffic(long read, long written) {
		fedBytesReceived.add(read);
		fedBytesSent.add(written);
	}
public static synchronized void incFederated(RequestType rqt, List<Object> data){
switch (rqt) {
case READ_VAR:
readCount.increment();
break;
case PUT_VAR:
putCount.increment();
incFedTransfer(data.get(0));
break;
case GET_VAR:
getCount.increment();
break;
case EXEC_INST:
executeInstructionCount.increment();
break;
case EXEC_UDF:
executeUDFCount.increment();
incFedTransfer(data);
break;
default:
break;
}
}
private static void incFedTransfer(List<Object> data) {
for(Object dataObj : data)
incFedTransfer(dataObj);
}
	// Records a transfer without coordinator attribution (no host/pid known).
	private static void incFedTransfer(Object dataObj) {
		incFedTransfer(dataObj, null, null);
	}
public static void incFedTransfer(Object dataObj, String host, Long pid) {
long byteAmount = 0;
if(dataObj instanceof MatrixBlock) {
transferredMatrixCount.increment();
byteAmount = ((MatrixBlock)dataObj).getInMemorySize();
transferredMatrixBytes.add(byteAmount);
}
else if(dataObj instanceof FrameBlock) {
transferredFrameCount.increment();
byteAmount = ((FrameBlock)dataObj).getInMemorySize();
transferredFrameBytes.add(byteAmount);
}
else if(dataObj instanceof ScalarObject) {
transferredScalarCount.increment();
}
else if(dataObj instanceof ListObject) {
transferredListCount.increment();
var listData = ((ListObject)dataObj).getData();
for (var entry: listData) {
if (entry.getDataType().isMatrix()) {
byteAmount += ((MatrixObject)entry).getDataSize();
} else if (entry.getDataType().isFrame()) {
byteAmount += ((FrameObject)entry).getDataSize();
}
}
}
else if(dataObj instanceof MatrixCharacteristics) {
transferredMatCharCount.increment();
}
if (host != null && pid != null) {
var coordinatorHostId = String.format("%s-%d", host, pid);
coordinatorsTrafficBytes.add(new TrafficModel(LocalDateTime.now(), coordinatorHostId, byteAmount));
}
}
	// Adds c asynchronous prefetch operations to the counter.
	public static void incAsyncPrefetchCount(long c) {
		asyncPrefetchCount.add(c);
	}
public static long getTotalFedTransferCount() {
return transferredScalarCount.longValue() + transferredListCount.longValue()
+ transferredMatrixCount.longValue() + transferredFrameCount.longValue()
+ transferredMatCharCount.longValue();
}
	/**
	 * Resets all coordinator- and worker-side federated statistics counters
	 * and clears the monitoring buffers.
	 */
	public static void reset() {
		readCount.reset();
		putCount.reset();
		getCount.reset();
		executeInstructionCount.reset();
		executeUDFCount.reset();
		transferredScalarCount.reset();
		transferredListCount.reset();
		transferredMatrixCount.reset();
		transferredFrameCount.reset();
		transferredMatCharCount.reset();
		transferredMatrixBytes.reset();
		transferredFrameBytes.reset();
		asyncPrefetchCount.reset();
		fedLookupTableGetCount.reset();
		fedLookupTableGetTime.reset();
		fedLookupTableEntryCount.reset();
		fedReuseReadHitCount.reset();
		fedReuseReadBytesCount.reset();
		fedPutLineageCount.reset();
		fedPutLineageItems.reset();
		fedSerializationReuseCount.reset();
		fedSerializationReuseBytes.reset();
		bytesSent.reset();
		bytesReceived.reset();
		fedBytesSent.reset();
		fedBytesReceived.reset();
		//TODO merge with existing
		coordinatorsTrafficBytes.clear();
		workerEvents.clear();
		workerDataObjects.clear();
		// NOTE(review): workerFederatedRequests is not cleared here -- confirm
		// whether request counts are intended to survive a reset
	}
public static String displayFedIOExecStatistics() {
if( readCount.longValue() > 0){ // only if there happened something on the federated worker
StringBuilder sb = new StringBuilder();
sb.append("Federated I/O (Read, Put, Get):\t" +
readCount.longValue() + "/" +
putCount.longValue() + "/" +
getCount.longValue() + ".\n");
sb.append("Federated Execute (Inst, UDF):\t" +
executeInstructionCount.longValue() + "/" +
executeUDFCount.longValue() + ".\n");
if(getTotalFedTransferCount() > 0)
sb.append("Fed Put Count (Sc/Li/Ma/Fr/MC):\t" +
transferredScalarCount.longValue() + "/" +
transferredListCount.longValue() + "/" +
transferredMatrixCount.longValue() + "/" +
transferredFrameCount.longValue() + "/" +
transferredMatCharCount.longValue() + ".\n");
if(transferredMatrixBytes.longValue() > 0 || transferredFrameBytes.longValue() > 0)
sb.append("Fed Put Bytes (Mat/Frame):\t" +
transferredMatrixBytes.longValue() + "/" +
transferredFrameBytes.longValue() + " Bytes.\n");
sb.append("Federated prefetch count:\t" +
asyncPrefetchCount.longValue() + ".\n");
return sb.toString();
}
return "";
}
public static String displayNetworkTrafficStatistics() {
return "Server I/O bytes (read/written):\t" +
bytesReceived.longValue() +
"/" +
bytesSent.longValue() +
"\n" +
"Worker I/O bytes (read/written):\t" +
fedBytesReceived.longValue() +
"/" +
fedBytesSent.longValue() +
"\n";
}
public static void registerFedWorker(String host, int port) {
_fedWorkerAddresses.add(new ImmutablePair<>(host, Integer.valueOf(port)));
}
public static String displayFedWorkers() {
StringBuilder sb = new StringBuilder();
sb.append("Federated Worker Addresses:\n");
for(Pair<String, Integer> fedAddr : _fedWorkerAddresses) {
sb.append(String.format(" %s:%d", fedAddr.getLeft(), fedAddr.getRight().intValue()));
sb.append("\n");
}
return sb.toString();
}
public static String displayFedWorkerStats() {
if( readCount.longValue() > 0){
StringBuilder sb = new StringBuilder();
sb.append(displayFedLookupTableStats());
sb.append(displayFedReuseReadStats());
sb.append(displayFedPutLineageStats());
sb.append(displayFedSerializationReuseStats());
//sb.append(displayFedTransfer());
//sb.append(displayCPUUsage());
//sb.append(displayMemoryUsage());
return sb.toString();
}
return "";
}
public static String displayStatistics(int numHeavyHitters) {
FedStatsCollection fedStats = collectFedStats();
return displayStatistics(fedStats, numHeavyHitters);
}
public static String displayStatistics(FedStatsCollection fedStats, int numHeavyHitters) {
StringBuilder sb = new StringBuilder();
sb.append("SystemDS Federated Statistics:\n");
sb.append(displayCacheStats(fedStats.cacheStats));
sb.append(String.format("Total JIT compile time:\t\t%.3f sec.\n", fedStats.jitCompileTime));
sb.append(displayGCStats(fedStats.gcStats));
sb.append(displayLinCacheStats(fedStats.linCacheStats));
sb.append(displayMultiTenantStats(fedStats.mtStats));
sb.append(displayFedTransfer());
sb.append(displayHeavyHitters(fedStats.heavyHitters, numHeavyHitters));
sb.append(displayNetworkTrafficStatistics());
return sb.toString();
}
private static String displayCacheStats(CacheStatsCollection csc) {
StringBuilder sb = new StringBuilder();
sb.append(String.format("Cache hits (Mem/Li/WB/FS/HDFS):\t%d/%d/%d/%d/%d.\n",
csc.memHits, csc.linHits, csc.fsBuffHits, csc.fsHits, csc.hdfsHits));
sb.append(String.format("Cache writes (Li/WB/FS/HDFS):\t%d/%d/%d/%d.\n",
csc.linWrites, csc.fsBuffWrites, csc.fsWrites, csc.hdfsWrites));
sb.append(String.format("Cache times (ACQr/m, RLS, EXP):\t%.3f/%.3f/%.3f/%.3f sec.\n",
csc.acqRTime, csc.acqMTime, csc.rlsTime, csc.expTime));
return sb.toString();
}
private static String displayGCStats(GCStatsCollection gcsc) {
StringBuilder sb = new StringBuilder();
sb.append(String.format("Total JVM GC count:\t\t%d.\n", gcsc.gcCount));
sb.append(String.format("Total JVM GC time:\t\t%.3f sec.\n", gcsc.gcTime));
return sb.toString();
}
private static String displayLinCacheStats(LineageCacheStatsCollection lcsc) {
StringBuilder sb = new StringBuilder();
sb.append(String.format("LinCache hits (Mem/FS/Del):\t%d/%d/%d.\n",
lcsc.numHitsMem, lcsc.numHitsFS, lcsc.numHitsDel));
sb.append(String.format("LinCache MultiLvl (Ins/SB/Fn):\t%d/%d/%d.\n",
lcsc.numHitsInst, lcsc.numHitsSB, lcsc.numHitsFunc));
sb.append(String.format("LinCache writes (Mem/FS/Del):\t%d/%d/%d.\n",
lcsc.numWritesMem, lcsc.numWritesFS, lcsc.numMemDel));
return sb.toString();
}
private static String displayMultiTenantStats(MultiTenantStatsCollection mtsc) {
StringBuilder sb = new StringBuilder();
sb.append(displayFedLookupTableStats(mtsc.fLTGetCount, mtsc.fLTEntryCount, mtsc.fLTGetTime));
sb.append(displayFedReuseReadStats(mtsc.reuseReadHits, mtsc.reuseReadBytes));
sb.append(displayFedPutLineageStats(mtsc.putLineageCount, mtsc.putLineageItems));
sb.append(displayFedSerializationReuseStats(mtsc.serializationReuseCount, mtsc.serializationReuseBytes));
return sb.toString();
}
	// Convenience overload: show the default of 10 heavy-hitter instructions.
	@SuppressWarnings("unused")
	private static String displayHeavyHitters(HashMap<String, Pair<Long, Double>> heavyHitters) {
		return displayHeavyHitters(heavyHitters, 10);
	}
	// Renders the buffered per-coordinator traffic entries.
	// NOTE(review): iterates coordinatorsTrafficBytes without synchronization
	// while other threads may append/drain it -- confirm single-threaded use.
	private static String displayFedTransfer() {
		StringBuilder sb = new StringBuilder();
		sb.append("Transferred bytes (Host/Datetime/ByteAmount):\n");
		for (var entry: coordinatorsTrafficBytes) {
			sb.append(String.format("%s/%s/%d.\n", entry.getCoordinatorHostId(), entry.timestamp, entry.byteAmount));
		}
		return sb.toString();
	}
	/**
	 * Renders a table of the num most time-consuming instructions, sorted by
	 * descending execution time, with wrapped instruction names and aligned
	 * columns sized to the widest entry.
	 *
	 * @param heavyHitters map of instruction name to (count, time in seconds)
	 * @param num maximum number of instructions to display
	 * @return formatted heavy-hitter table
	 */
	private static String displayHeavyHitters(HashMap<String, Pair<Long, Double>> heavyHitters, int num) {
		StringBuilder sb = new StringBuilder();
		@SuppressWarnings("unchecked")
		Entry<String, Pair<Long, Double>>[] hhArr = heavyHitters.entrySet().toArray(new Entry[0]);
		// sort ascending by time; rows are later read from the end of the array
		Arrays.sort(hhArr, new Comparator<Entry<String, Pair<Long, Double>>>() {
			public int compare(Entry<String, Pair<Long, Double>> e1, Entry<String, Pair<Long, Double>> e2) {
				return e1.getValue().getRight().compareTo(e2.getValue().getRight());
			}
		});
		sb.append("Heavy hitter instructions:\n");
		final String numCol = "#";
		final String instCol = "Instruction";
		final String timeSCol = "Time(s)";
		final String countCol = "Count";
		int numHittersToDisplay = Math.min(num, hhArr.length);
		int maxNumLen = String.valueOf(numHittersToDisplay).length();
		int maxInstLen = instCol.length();
		int maxTimeSLen = timeSCol.length();
		int maxCountLen = countCol.length();
		DecimalFormat sFormat = new DecimalFormat("#,##0.000");
		// first pass: compute the column widths over the rows to be displayed
		for (int counter = 0; counter < numHittersToDisplay; counter++) {
			Entry<String, Pair<Long, Double>> hh = hhArr[hhArr.length - 1 - counter];
			String instruction = hh.getKey();
			maxInstLen = Math.max(maxInstLen, instruction.length());
			String timeString = sFormat.format(hh.getValue().getRight());
			maxTimeSLen = Math.max(maxTimeSLen, timeString.length());
			maxCountLen = Math.max(maxCountLen, String.valueOf(hh.getValue().getLeft()).length());
		}
		// cap the instruction column; longer names are wrapped across lines
		maxInstLen = Math.min(maxInstLen, DMLScript.STATISTICS_MAX_WRAP_LEN);
		sb.append(String.format( " %" + maxNumLen + "s %-" + maxInstLen + "s %"
			+ maxTimeSLen + "s %" + maxCountLen + "s", numCol, instCol, timeSCol, countCol));
		sb.append("\n");
		// second pass: emit rows; wrapped instructions span multiple lines with
		// rank/time/count only on the first line
		for (int counter = 0; counter < numHittersToDisplay; counter++) {
			String instruction = hhArr[hhArr.length - 1 - counter].getKey();
			String [] wrappedInstruction = Statistics.wrap(instruction, maxInstLen);
			String timeSString = sFormat.format(hhArr[hhArr.length - 1 - counter].getValue().getRight());
			long count = hhArr[hhArr.length - 1 - counter].getValue().getLeft();
			int numLines = wrappedInstruction.length;
			for(int wrapIter = 0; wrapIter < numLines; wrapIter++) {
				String instStr = (wrapIter < wrappedInstruction.length) ? wrappedInstruction[wrapIter] : "";
				if(wrapIter == 0) {
					sb.append(String.format(
						" %" + maxNumLen + "d %-" + maxInstLen + "s %" + maxTimeSLen + "s %"
						+ maxCountLen + "d", (counter + 1), instStr, timeSString, count));
				}
				else {
					sb.append(String.format(
						" %" + maxNumLen + "s %-" + maxInstLen + "s %" + maxTimeSLen + "s %"
						+ maxCountLen + "s", "", instStr, "", ""));
				}
				sb.append("\n");
			}
		}
		return sb.toString();
	}
	/**
	 * Collects and aggregates statistics from all registered federated workers
	 * by executing a FedStatsCollectFunction UDF on each of them.
	 *
	 * @return aggregated statistics over all reachable workers
	 * @throws DMLRuntimeException if retrieving a worker response fails
	 */
	private static FedStatsCollection collectFedStats() {
		Future<FederatedResponse>[] responses = getFederatedResponses();
		FedStatsCollection aggFedStats = new FedStatsCollection();
		// optional per-response timeout in seconds; <= 0 waits indefinitely
		final int timeout = ConfigurationManager.getFederatedTimeout();
		for(Future<FederatedResponse> res : responses) {
			try {
				Object[] tmp = timeout > 0 ? //
					res.get(timeout, TimeUnit.SECONDS).getData() : //
					res.get().getData();
				if(tmp[0] instanceof FedStatsCollection)
					aggFedStats.aggregate((FedStatsCollection)tmp[0]);
			} catch(Exception e) {
				throw new DMLRuntimeException("Exception of type " + e.getClass().toString()
					+ " thrown while " + "getting the federated stats of the federated response: ", e);
			}
		}
		return aggFedStats;
	}
private static Future<FederatedResponse>[] getFederatedResponses() {
List<Future<FederatedResponse>> ret = new ArrayList<>();
for(Pair<String, Integer> fedAddr : _fedWorkerAddresses) {
InetSocketAddress isa = new InetSocketAddress(fedAddr.getLeft(), fedAddr.getRight());
FederatedRequest frUDF = new FederatedRequest(RequestType.EXEC_UDF, -1,
new FedStatsCollectFunction());
try {
ret.add(FederatedData.executeFederatedOperation(isa, frUDF));
} catch(DMLRuntimeException dre) {
// silently ignore this exception --> caused by offline federated workers
} catch (Exception e) {
System.out.println("Exeption of type " + e.getClass().getName()
+ " thrown while getting stats from federated worker: " + e.getMessage());
}
}
@SuppressWarnings("unchecked")
Future<FederatedResponse>[] retArr = ret.toArray(new Future[0]);
return retArr;
}
	// Number of lookups into the federated variable lookup table.
	public static long getFedLookupTableGetCount() {
		return fedLookupTableGetCount.longValue();
	}
public static List<TrafficModel> getCoordinatorsTrafficBytes() {
var result = new ArrayList<>(coordinatorsTrafficBytes);
coordinatorsTrafficBytes.clear();
return result;
}
public static List<EventModel> getWorkerEvents() {
var result = new ArrayList<>(workerEvents);
workerEvents.clear();
return result;
}
	// Snapshot of the per-type federated request counters (not drained).
	public static List<RequestModel> getWorkerRequests() {
		return new ArrayList<>(workerFederatedRequests.values());
	}
	// Snapshot of the currently tracked data objects (not drained).
	public static List<DataObjectModel> getWorkerDataObjects() {
		return new ArrayList<>(workerDataObjects.values());
	}
	// Buffers a worker event for the monitoring backend.
	public synchronized static void addEvent(EventModel event) {
		// synchronized, because multiple requests can be handled concurrently
		workerEvents.add(event);
	}
public static void addWorkerRequest(RequestModel request) {
if (!workerFederatedRequests.containsKey(request.type)) {
workerFederatedRequests.put(request.type, request);
}
workerFederatedRequests.get(request.type).count++;
}
	// Tracks (or replaces) a data object under its variable name.
	public static void addDataObject(DataObjectModel dataObject) {
		workerDataObjects.put(dataObject.varName, dataObject);
	}
	// Clears all tracked data objects.
	public static void removeDataObjects() {
		workerDataObjects.clear();
	}
public static UtilizationModel getUtilization() {
var osMXBean = ManagementFactory.getOperatingSystemMXBean();
var memoryMXBean = ManagementFactory.getMemoryMXBean();
double cpuUsage = osMXBean.getSystemLoadAverage();
double memoryUsage = 0.0;
double maxMemory = (double)memoryMXBean.getHeapMemoryUsage().getMax() / 1073741824;
double usedMemory = (double)memoryMXBean.getHeapMemoryUsage().getUsed() / 1073741824;
memoryUsage = (usedMemory / maxMemory) * 100;
return new UtilizationModel(cpuUsage, memoryUsage);
}
	// --- plain accessors for the worker-side statistics counters ---
	public static long getFedLookupTableGetTime() {
		return fedLookupTableGetTime.longValue();
	}
	public static long getFedLookupTableEntryCount() {
		return fedLookupTableEntryCount.longValue();
	}
	public static long getFedReuseReadHitCount() {
		return fedReuseReadHitCount.longValue();
	}
	public static long getFedReuseReadBytesCount() {
		return fedReuseReadBytesCount.longValue();
	}
	public static long getFedPutLineageCount() {
		return fedPutLineageCount.longValue();
	}
	public static long getFedPutLineageItems() {
		return fedPutLineageItems.longValue();
	}
	public static long getFedSerializationReuseCount() {
		return fedSerializationReuseCount.longValue();
	}
	public static long getFedSerializationReuseBytes() {
		return fedSerializationReuseBytes.longValue();
	}
	// --- incrementers for the worker-side statistics counters ---
	public static void incFedLookupTableGetCount() {
		fedLookupTableGetCount.increment();
	}
	// NOTE(review): callers' unit for 'time' is not visible here; the display
	// path divides the accumulated value by 1e9 (nanoseconds) -- confirm.
	public static void incFedLookupTableGetTime(long time) {
		fedLookupTableGetTime.add(time);
	}
	public static void incFedLookupTableEntryCount() {
		fedLookupTableEntryCount.increment();
	}
	public static void incFedReuseReadHitCount() {
		fedReuseReadHitCount.increment();
	}
	public static void incFedReuseReadBytesCount(CacheableData<?> data) {
		fedReuseReadBytesCount.add(data.getDataSize());
	}
	public static void incFedReuseReadBytesCount(CacheBlock<?> cb) {
		fedReuseReadBytesCount.add(cb.getInMemorySize());
	}
	// Counts one put-lineage request and the number of serialized lineage lines.
	public static void aggFedPutLineage(String serializedLineage) {
		fedPutLineageCount.increment();
		fedPutLineageItems.add(serializedLineage.lines().count());
	}
	// Counts one serialization reuse and the number of reused bytes.
	public static void aggFedSerializationReuse(long bytes) {
		fedSerializationReuseCount.increment();
		fedSerializationReuseBytes.add(bytes);
	}
	// Each display* method below comes as a pair: a no-arg variant reading the
	// local counters, and a parameterized variant used for aggregated values.
	// All return the empty string when the corresponding count is zero.
	public static String displayFedLookupTableStats() {
		// converts accumulated time to seconds (assumes nanoseconds)
		return displayFedLookupTableStats(fedLookupTableGetCount.longValue(),
			fedLookupTableEntryCount.longValue(), fedLookupTableGetTime.doubleValue() / 1000000000);
	}
	public static String displayFedLookupTableStats(long fltGetCount, long fltEntryCount, double fltGetTime) {
		if(fltGetCount > 0) {
			return InstructionUtils.concatStrings(
				"Fed LookupTable (Get, Entries):\t",
				String.valueOf(fltGetCount), "/", String.valueOf(fltEntryCount),".\n");
		}
		return "";
	}
	public static String displayFedReuseReadStats() {
		return displayFedReuseReadStats(
			fedReuseReadHitCount.longValue(),
			fedReuseReadBytesCount.longValue());
	}
	public static String displayFedReuseReadStats(long rrHits, long rrBytes) {
		if(rrHits > 0) {
			return InstructionUtils.concatStrings(
				"Fed ReuseRead (Hits, Bytes):\t",
				String.valueOf(rrHits), "/", String.valueOf(rrBytes), ".\n");
		}
		return "";
	}
	public static String displayFedPutLineageStats() {
		return displayFedPutLineageStats(fedPutLineageCount.longValue(),
			fedPutLineageItems.longValue());
	}
	public static String displayFedPutLineageStats(long plCount, long plItems) {
		if(plCount > 0) {
			return InstructionUtils.concatStrings(
				"Fed PutLineage (Count, Items):\t",
				String.valueOf(plCount), "/", String.valueOf(plItems), ".\n");
		}
		return "";
	}
	public static String displayFedSerializationReuseStats() {
		return displayFedSerializationReuseStats(fedSerializationReuseCount.longValue(),
			fedSerializationReuseBytes.longValue());
	}
	public static String displayFedSerializationReuseStats(long srCount, long srBytes) {
		if(srCount > 0) {
			return InstructionUtils.concatStrings(
				"Fed SerialReuse (Count, Bytes):\t",
				String.valueOf(srCount), "/", String.valueOf(srBytes), ".\n");
		}
		return "";
	}
	/**
	 * UDF shipped to federated workers to collect their local statistics;
	 * returns a populated FedStatsCollection in the response.
	 */
	public static class FedStatsCollectFunction extends FederatedUDF {
		private static final long serialVersionUID = 1L;
		public FedStatsCollectFunction() {
			super(new long[] { });
		}
		@Override
		public FederatedResponse execute(ExecutionContext ec, Data... data) {
			FedStatsCollection fedStats = new FedStatsCollection();
			fedStats.collectStats();
			return new FederatedResponse(FederatedResponse.ResponseType.SUCCESS, fedStats);
		}
		@Override
		public Pair<String, LineageItem> getLineageItem(ExecutionContext ec) {
			// statistics collection has no lineage representation
			return null;
		}
	}
public static class FedStatsCollection implements Serializable {
		// TODO fix this class to use shallow pointers.
		private static final long serialVersionUID = 1L;
		// snapshots of the worker-local statistics, filled by collectStats()
		private CacheStatsCollection cacheStats = new CacheStatsCollection();
		public double jitCompileTime = 0;
		public UtilizationModel utilization = new UtilizationModel(0.0, 0.0);
		private GCStatsCollection gcStats = new GCStatsCollection();
		private LineageCacheStatsCollection linCacheStats = new LineageCacheStatsCollection();
		private MultiTenantStatsCollection mtStats = new MultiTenantStatsCollection();
		// instruction name -> (count, time) of the worker's heavy hitters
		public HashMap<String, Pair<Long, Double>> heavyHitters = new HashMap<>();
		// monitoring buffers drained from the worker on collection
		public List<TrafficModel> coordinatorsTrafficBytes = new ArrayList<>();
		public List<EventModel> workerEvents = new ArrayList<>();
		public List<DataObjectModel> workerDataObjects = new ArrayList<>();
		public List<RequestModel> workerRequests = new ArrayList<>();
		// Fills this collection with the current worker-local statistics.
		// Note: the getCoordinatorsTrafficBytes/getWorkerEvents calls DRAIN
		// the corresponding buffers.
		private void collectStats() {
			cacheStats.collectStats();
			jitCompileTime = ((double)Statistics.getJITCompileTime()) / 1000; // in sec
			utilization = getUtilization();
			gcStats.collectStats();
			linCacheStats.collectStats();
			mtStats.collectStats();
			heavyHitters = Statistics.getHeavyHittersHashMap();
			coordinatorsTrafficBytes = getCoordinatorsTrafficBytes();
			workerEvents = getWorkerEvents();
			workerDataObjects = getWorkerDataObjects();
			workerRequests = getWorkerRequests();
		}
		/**
		 * Merges another worker's statistics into this collection: counters
		 * and times are summed, heavy hitters are merged by summing count and
		 * time per instruction, and monitoring buffers are concatenated.
		 * NOTE(review): utilization is overwritten (last worker wins) rather
		 * than averaged -- confirm this is intended.
		 */
		public void aggregate(FedStatsCollection that) {
			cacheStats.aggregate(that.cacheStats);
			jitCompileTime += that.jitCompileTime;
			utilization = that.utilization;
			gcStats.aggregate(that.gcStats);
			linCacheStats.aggregate(that.linCacheStats);
			mtStats.aggregate(that.mtStats);
			that.heavyHitters.forEach(
				(key, value) -> heavyHitters.merge(key, value, (v1, v2) ->
					new ImmutablePair<>(v1.getLeft() + v2.getLeft(), v1.getRight() + v2.getRight()))
			);
			coordinatorsTrafficBytes.addAll(that.coordinatorsTrafficBytes);
			workerEvents.addAll(that.workerEvents);
			workerDataObjects.addAll(that.workerDataObjects);
			workerRequests.addAll(that.workerRequests);
		}
		/**
		 * Serializable snapshot of the buffer-pool cache statistics
		 * (hit/write counters per cache level, and timing in seconds).
		 */
		protected static class CacheStatsCollection implements Serializable {
			private static final long serialVersionUID = 1L;
			private long memHits = 0;
			private long linHits = 0;
			private long fsBuffHits = 0;
			private long fsHits = 0;
			private long hdfsHits = 0;
			private long linWrites = 0;
			private long fsBuffWrites = 0;
			private long fsWrites = 0;
			private long hdfsWrites = 0;
			private double acqRTime = 0;
			private double acqMTime = 0;
			private double rlsTime = 0;
			private double expTime = 0;
			// Copies the current CacheStatistics counters into this snapshot.
			private void collectStats() {
				memHits = CacheStatistics.getMemHits();
				linHits = CacheStatistics.getLinHits();
				fsBuffHits = CacheStatistics.getFSBuffHits();
				fsHits = CacheStatistics.getFSHits();
				hdfsHits = CacheStatistics.getHDFSHits();
				linWrites = CacheStatistics.getLinWrites();
				fsBuffWrites = CacheStatistics.getFSBuffWrites();
				fsWrites = CacheStatistics.getFSWrites();
				hdfsWrites = CacheStatistics.getHDFSWrites();
				acqRTime = ((double) CacheStatistics.getAcquireRTime()) / 1000000000; // in sec
				acqMTime = ((double) CacheStatistics.getAcquireMTime()) / 1000000000; // in sec
				rlsTime = ((double) CacheStatistics.getReleaseTime()) / 1000000000; // in sec
				expTime = ((double) CacheStatistics.getExportTime()) / 1000000000; // in sec
			}
			// Element-wise addition of another snapshot into this one.
			private void aggregate(CacheStatsCollection that) {
				memHits += that.memHits;
				linHits += that.linHits;
				fsBuffHits += that.fsBuffHits;
				fsHits += that.fsHits;
				hdfsHits += that.hdfsHits;
				linWrites += that.linWrites;
				fsBuffWrites += that.fsBuffWrites;
				fsWrites += that.fsWrites;
				hdfsWrites += that.hdfsWrites;
				acqRTime += that.acqRTime;
				acqMTime += that.acqMTime;
				rlsTime += that.rlsTime;
				expTime += that.expTime;
			}
			@Override
			public String toString() {
				StringBuilder sb = new StringBuilder();
				sb.append("CacheStatsCollection:");
				sb.append("\tmemHits:" + memHits);
				sb.append("\tlinHits:" + linHits);
				sb.append("\tfsBuffHits:" + fsBuffHits);
				sb.append("\tfsHits:" + fsHits);
				sb.append("\thdfsHits:" + hdfsHits);
				sb.append("\tlinWrites:" + linWrites);
				sb.append("\tfsBuffWrites:" + fsBuffWrites);
				sb.append("\tfsWrites:" + fsWrites);
				sb.append("\thdfsWrites:" + hdfsWrites);
				sb.append("\tacqRTime:" + acqRTime);
				sb.append("\tacqMTime:" + acqMTime);
				sb.append("\trlsTime:" + rlsTime);
				sb.append("\texpTime:" + expTime);
				return sb.toString();
			}
		}
protected static class GCStatsCollection implements Serializable {
private static final long serialVersionUID = 1L;
private void collectStats() {
gcCount = Statistics.getJVMgcCount();
gcTime = ((double)Statistics.getJVMgcTime()) / 1000; // in sec
}
private void aggregate(GCStatsCollection that) {
gcCount += that.gcCount;
gcTime += that.gcTime;
}
private long gcCount = 0;
private double gcTime = 0;
}
		/**
		 * Serializable snapshot of the lineage-cache statistics
		 * (hits, multi-level hits, and writes per storage level).
		 */
		protected static class LineageCacheStatsCollection implements Serializable {
			private static final long serialVersionUID = 1L;
			private long numHitsMem = 0;
			private long numHitsFS = 0;
			private long numHitsDel = 0;
			private long numHitsInst = 0;
			private long numHitsSB = 0;
			private long numHitsFunc = 0;
			private long numWritesMem = 0;
			private long numWritesFS = 0;
			private long numMemDel = 0;
			// Copies the current LineageCacheStatistics into this snapshot.
			private void collectStats() {
				numHitsMem = LineageCacheStatistics.getMemHits();
				numHitsFS = LineageCacheStatistics.getFSHits();
				numHitsDel = LineageCacheStatistics.getDelHits();
				numHitsInst = LineageCacheStatistics.getInstHits();
				numHitsSB = LineageCacheStatistics.getSBHits();
				numHitsFunc = LineageCacheStatistics.getFuncHits();
				numWritesMem = LineageCacheStatistics.getMemWrites();
				numWritesFS = LineageCacheStatistics.getFSWrites();
				numMemDel = LineageCacheStatistics.getMemDeletes();
			}
			// Element-wise addition of another snapshot into this one.
			private void aggregate(LineageCacheStatsCollection that) {
				numHitsMem += that.numHitsMem;
				numHitsFS += that.numHitsFS;
				numHitsDel += that.numHitsDel;
				numHitsInst += that.numHitsInst;
				numHitsSB += that.numHitsSB;
				numHitsFunc += that.numHitsFunc;
				numWritesMem += that.numWritesMem;
				numWritesFS += that.numWritesFS;
				numMemDel += that.numMemDel;
			}
			@Override
			public String toString() {
				StringBuilder sb = new StringBuilder();
				sb.append("numHitsMem: " + numHitsMem);
				sb.append("\tnumHitsFS: " + numHitsFS);
				sb.append("\tnumHitsDel: " + numHitsDel);
				sb.append("\tnumHitsInst: " + numHitsInst);
				sb.append("\tnumHitsSB: " + numHitsSB);
				sb.append("\tnumHitsFunc: " + numHitsFunc);
				sb.append("\tnumWritesMem: " + numWritesMem);
				sb.append("\tnumWritesFS: " + numWritesFS);
				sb.append("\tnumMemDel: " + numMemDel);
				return sb.toString();
			}
		}
		/**
		 * Serializable snapshot of the multi-tenant worker statistics
		 * (lookup table, read reuse, put lineage, serialization reuse).
		 */
		protected static class MultiTenantStatsCollection implements Serializable {
			private static final long serialVersionUID = 1L;
			private long fLTGetCount = 0;
			private double fLTGetTime = 0; // seconds
			private long fLTEntryCount = 0;
			private long reuseReadHits = 0;
			private long reuseReadBytes = 0;
			private long putLineageCount = 0;
			private long putLineageItems = 0;
			private long serializationReuseCount = 0;
			private long serializationReuseBytes = 0;
			// Copies the current worker counters into this snapshot.
			private void collectStats() {
				fLTGetCount = getFedLookupTableGetCount();
				fLTGetTime = ((double)getFedLookupTableGetTime()) / 1000000000; // in sec
				fLTEntryCount = getFedLookupTableEntryCount();
				reuseReadHits = getFedReuseReadHitCount();
				reuseReadBytes = getFedReuseReadBytesCount();
				putLineageCount = getFedPutLineageCount();
				putLineageItems = getFedPutLineageItems();
				serializationReuseCount = getFedSerializationReuseCount();
				serializationReuseBytes = getFedSerializationReuseBytes();
			}
			// Element-wise addition of another snapshot into this one.
			private void aggregate(MultiTenantStatsCollection that) {
				fLTGetCount += that.fLTGetCount;
				fLTGetTime += that.fLTGetTime;
				fLTEntryCount += that.fLTEntryCount;
				reuseReadHits += that.reuseReadHits;
				reuseReadBytes += that.reuseReadBytes;
				putLineageCount += that.putLineageCount;
				putLineageItems += that.putLineageItems;
				serializationReuseCount += that.serializationReuseCount;
				serializationReuseBytes += that.serializationReuseBytes;
			}
		}
@Override
public String toString() {
  // One sub-collection per line, prefixed with its name; trailing blank line kept.
  return new StringBuilder()
      .append("\nFedStatsCollection: ")
      .append("\ncacheStats ").append(cacheStats)
      .append("\njit ").append(jitCompileTime)
      .append("\nutilization ").append(utilization)
      .append("\ngcStats ").append(gcStats)
      .append("\nlinCacheStats ").append(linCacheStats)
      .append("\nmtStats ").append(mtStats)
      .append("\nheavyHitters ").append(heavyHitters)
      .append("\ncoordinatorsTrafficBytes ").append(coordinatorsTrafficBytes)
      .append("\nworkerEvents ").append(workerEvents)
      .append("\nworkerDataObjects ").append(workerDataObjects)
      .append("\nworkerRequests ").append(workerRequests)
      .append("\n\n")
      .toString();
}
}
}
|
googleapis/google-cloud-java | 37,143 | java-datalineage/google-cloud-datalineage/src/main/java/com/google/cloud/datacatalog/lineage/v1/stub/GrpcLineageStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.datacatalog.lineage.v1.stub;
import static com.google.cloud.datacatalog.lineage.v1.LineageClient.BatchSearchLinkProcessesPagedResponse;
import static com.google.cloud.datacatalog.lineage.v1.LineageClient.ListLineageEventsPagedResponse;
import static com.google.cloud.datacatalog.lineage.v1.LineageClient.ListProcessesPagedResponse;
import static com.google.cloud.datacatalog.lineage.v1.LineageClient.ListRunsPagedResponse;
import static com.google.cloud.datacatalog.lineage.v1.LineageClient.SearchLinksPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.datacatalog.lineage.v1.BatchSearchLinkProcessesRequest;
import com.google.cloud.datacatalog.lineage.v1.BatchSearchLinkProcessesResponse;
import com.google.cloud.datacatalog.lineage.v1.CreateLineageEventRequest;
import com.google.cloud.datacatalog.lineage.v1.CreateProcessRequest;
import com.google.cloud.datacatalog.lineage.v1.CreateRunRequest;
import com.google.cloud.datacatalog.lineage.v1.DeleteLineageEventRequest;
import com.google.cloud.datacatalog.lineage.v1.DeleteProcessRequest;
import com.google.cloud.datacatalog.lineage.v1.DeleteRunRequest;
import com.google.cloud.datacatalog.lineage.v1.GetLineageEventRequest;
import com.google.cloud.datacatalog.lineage.v1.GetProcessRequest;
import com.google.cloud.datacatalog.lineage.v1.GetRunRequest;
import com.google.cloud.datacatalog.lineage.v1.LineageEvent;
import com.google.cloud.datacatalog.lineage.v1.ListLineageEventsRequest;
import com.google.cloud.datacatalog.lineage.v1.ListLineageEventsResponse;
import com.google.cloud.datacatalog.lineage.v1.ListProcessesRequest;
import com.google.cloud.datacatalog.lineage.v1.ListProcessesResponse;
import com.google.cloud.datacatalog.lineage.v1.ListRunsRequest;
import com.google.cloud.datacatalog.lineage.v1.ListRunsResponse;
import com.google.cloud.datacatalog.lineage.v1.OperationMetadata;
import com.google.cloud.datacatalog.lineage.v1.Process;
import com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventRequest;
import com.google.cloud.datacatalog.lineage.v1.ProcessOpenLineageRunEventResponse;
import com.google.cloud.datacatalog.lineage.v1.Run;
import com.google.cloud.datacatalog.lineage.v1.SearchLinksRequest;
import com.google.cloud.datacatalog.lineage.v1.SearchLinksResponse;
import com.google.cloud.datacatalog.lineage.v1.UpdateProcessRequest;
import com.google.cloud.datacatalog.lineage.v1.UpdateRunRequest;
import com.google.longrunning.Operation;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * gRPC stub implementation for the Lineage service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 *
 * <p>NOTE(review): generated by gapic-generator-java — do not hand-edit logic here;
 * regeneration will overwrite manual changes. Comments below only annotate structure.
 */
@Generated("by gapic-generator-java")
public class GrpcLineageStub extends LineageStub {

  // --- Static gRPC method descriptors: one per RPC, shared by all stub instances.
  // Each pairs the fully-qualified method name with protobuf request/response marshallers.

  private static final MethodDescriptor<
          ProcessOpenLineageRunEventRequest, ProcessOpenLineageRunEventResponse>
      processOpenLineageRunEventMethodDescriptor =
          MethodDescriptor
              .<ProcessOpenLineageRunEventRequest, ProcessOpenLineageRunEventResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.datacatalog.lineage.v1.Lineage/ProcessOpenLineageRunEvent")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ProcessOpenLineageRunEventRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ProcessOpenLineageRunEventResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<CreateProcessRequest, Process>
      createProcessMethodDescriptor =
          MethodDescriptor.<CreateProcessRequest, Process>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/CreateProcess")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateProcessRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Process.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<UpdateProcessRequest, Process>
      updateProcessMethodDescriptor =
          MethodDescriptor.<UpdateProcessRequest, Process>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/UpdateProcess")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateProcessRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Process.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<GetProcessRequest, Process> getProcessMethodDescriptor =
      MethodDescriptor.<GetProcessRequest, Process>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/GetProcess")
          .setRequestMarshaller(ProtoUtils.marshaller(GetProcessRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Process.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<ListProcessesRequest, ListProcessesResponse>
      listProcessesMethodDescriptor =
          MethodDescriptor.<ListProcessesRequest, ListProcessesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/ListProcesses")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListProcessesRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListProcessesResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<DeleteProcessRequest, Operation>
      deleteProcessMethodDescriptor =
          MethodDescriptor.<DeleteProcessRequest, Operation>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/DeleteProcess")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteProcessRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<CreateRunRequest, Run> createRunMethodDescriptor =
      MethodDescriptor.<CreateRunRequest, Run>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/CreateRun")
          .setRequestMarshaller(ProtoUtils.marshaller(CreateRunRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Run.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<UpdateRunRequest, Run> updateRunMethodDescriptor =
      MethodDescriptor.<UpdateRunRequest, Run>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/UpdateRun")
          .setRequestMarshaller(ProtoUtils.marshaller(UpdateRunRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Run.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<GetRunRequest, Run> getRunMethodDescriptor =
      MethodDescriptor.<GetRunRequest, Run>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/GetRun")
          .setRequestMarshaller(ProtoUtils.marshaller(GetRunRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Run.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<ListRunsRequest, ListRunsResponse>
      listRunsMethodDescriptor =
          MethodDescriptor.<ListRunsRequest, ListRunsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/ListRuns")
              .setRequestMarshaller(ProtoUtils.marshaller(ListRunsRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(ListRunsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<DeleteRunRequest, Operation> deleteRunMethodDescriptor =
      MethodDescriptor.<DeleteRunRequest, Operation>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/DeleteRun")
          .setRequestMarshaller(ProtoUtils.marshaller(DeleteRunRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Operation.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();

  private static final MethodDescriptor<CreateLineageEventRequest, LineageEvent>
      createLineageEventMethodDescriptor =
          MethodDescriptor.<CreateLineageEventRequest, LineageEvent>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/CreateLineageEvent")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateLineageEventRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(LineageEvent.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<GetLineageEventRequest, LineageEvent>
      getLineageEventMethodDescriptor =
          MethodDescriptor.<GetLineageEventRequest, LineageEvent>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/GetLineageEvent")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetLineageEventRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(LineageEvent.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<ListLineageEventsRequest, ListLineageEventsResponse>
      listLineageEventsMethodDescriptor =
          MethodDescriptor.<ListLineageEventsRequest, ListLineageEventsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/ListLineageEvents")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListLineageEventsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListLineageEventsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<DeleteLineageEventRequest, Empty>
      deleteLineageEventMethodDescriptor =
          MethodDescriptor.<DeleteLineageEventRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/DeleteLineageEvent")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteLineageEventRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<SearchLinksRequest, SearchLinksResponse>
      searchLinksMethodDescriptor =
          MethodDescriptor.<SearchLinksRequest, SearchLinksResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.cloud.datacatalog.lineage.v1.Lineage/SearchLinks")
              .setRequestMarshaller(ProtoUtils.marshaller(SearchLinksRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(SearchLinksResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  private static final MethodDescriptor<
          BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesResponse>
      batchSearchLinkProcessesMethodDescriptor =
          MethodDescriptor
              .<BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.cloud.datacatalog.lineage.v1.Lineage/BatchSearchLinkProcesses")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(BatchSearchLinkProcessesRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(BatchSearchLinkProcessesResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();

  // --- Per-instance callables, wired up once in the constructor. List* methods additionally
  // expose a paged variant; delete* long-running methods expose an OperationCallable.

  private final UnaryCallable<ProcessOpenLineageRunEventRequest, ProcessOpenLineageRunEventResponse>
      processOpenLineageRunEventCallable;
  private final UnaryCallable<CreateProcessRequest, Process> createProcessCallable;
  private final UnaryCallable<UpdateProcessRequest, Process> updateProcessCallable;
  private final UnaryCallable<GetProcessRequest, Process> getProcessCallable;
  private final UnaryCallable<ListProcessesRequest, ListProcessesResponse> listProcessesCallable;
  private final UnaryCallable<ListProcessesRequest, ListProcessesPagedResponse>
      listProcessesPagedCallable;
  private final UnaryCallable<DeleteProcessRequest, Operation> deleteProcessCallable;
  private final OperationCallable<DeleteProcessRequest, Empty, OperationMetadata>
      deleteProcessOperationCallable;
  private final UnaryCallable<CreateRunRequest, Run> createRunCallable;
  private final UnaryCallable<UpdateRunRequest, Run> updateRunCallable;
  private final UnaryCallable<GetRunRequest, Run> getRunCallable;
  private final UnaryCallable<ListRunsRequest, ListRunsResponse> listRunsCallable;
  private final UnaryCallable<ListRunsRequest, ListRunsPagedResponse> listRunsPagedCallable;
  private final UnaryCallable<DeleteRunRequest, Operation> deleteRunCallable;
  private final OperationCallable<DeleteRunRequest, Empty, OperationMetadata>
      deleteRunOperationCallable;
  private final UnaryCallable<CreateLineageEventRequest, LineageEvent> createLineageEventCallable;
  private final UnaryCallable<GetLineageEventRequest, LineageEvent> getLineageEventCallable;
  private final UnaryCallable<ListLineageEventsRequest, ListLineageEventsResponse>
      listLineageEventsCallable;
  private final UnaryCallable<ListLineageEventsRequest, ListLineageEventsPagedResponse>
      listLineageEventsPagedCallable;
  private final UnaryCallable<DeleteLineageEventRequest, Empty> deleteLineageEventCallable;
  private final UnaryCallable<SearchLinksRequest, SearchLinksResponse> searchLinksCallable;
  private final UnaryCallable<SearchLinksRequest, SearchLinksPagedResponse>
      searchLinksPagedCallable;
  private final UnaryCallable<BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesResponse>
      batchSearchLinkProcessesCallable;
  private final UnaryCallable<
          BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesPagedResponse>
      batchSearchLinkProcessesPagedCallable;

  // Aggregates everything that must be shut down with this stub.
  private final BackgroundResource backgroundResources;
  // Stub for the long-running Operations service backing delete* methods.
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  // --- Static factories (preferred over the protected constructors). ---

  public static final GrpcLineageStub create(LineageStubSettings settings) throws IOException {
    return new GrpcLineageStub(settings, ClientContext.create(settings));
  }

  public static final GrpcLineageStub create(ClientContext clientContext) throws IOException {
    return new GrpcLineageStub(LineageStubSettings.newBuilder().build(), clientContext);
  }

  public static final GrpcLineageStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcLineageStub(
        LineageStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcLineageStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   */
  protected GrpcLineageStub(LineageStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcLineageCallableFactory());
  }

  /**
   * Constructs an instance of GrpcLineageStub, using the given settings. This is protected so that
   * it is easy to make a subclass, but otherwise, the static factory methods should be preferred.
   *
   * <p>Builds one GrpcCallSettings per RPC (each params extractor copies a request field — e.g.
   * "parent" or "name" — into the request params used for routing), then asks the callable
   * factory to combine transport settings + retry/timeout settings into the final callables.
   */
  protected GrpcLineageStub(
      LineageStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // Transport settings: descriptor + request-params extractor for each RPC.
    GrpcCallSettings<ProcessOpenLineageRunEventRequest, ProcessOpenLineageRunEventResponse>
        processOpenLineageRunEventTransportSettings =
            GrpcCallSettings
                .<ProcessOpenLineageRunEventRequest, ProcessOpenLineageRunEventResponse>newBuilder()
                .setMethodDescriptor(processOpenLineageRunEventMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<CreateProcessRequest, Process> createProcessTransportSettings =
        GrpcCallSettings.<CreateProcessRequest, Process>newBuilder()
            .setMethodDescriptor(createProcessMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateProcessRequest, Process> updateProcessTransportSettings =
        GrpcCallSettings.<UpdateProcessRequest, Process>newBuilder()
            .setMethodDescriptor(updateProcessMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("process.name", String.valueOf(request.getProcess().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetProcessRequest, Process> getProcessTransportSettings =
        GrpcCallSettings.<GetProcessRequest, Process>newBuilder()
            .setMethodDescriptor(getProcessMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListProcessesRequest, ListProcessesResponse> listProcessesTransportSettings =
        GrpcCallSettings.<ListProcessesRequest, ListProcessesResponse>newBuilder()
            .setMethodDescriptor(listProcessesMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteProcessRequest, Operation> deleteProcessTransportSettings =
        GrpcCallSettings.<DeleteProcessRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteProcessMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateRunRequest, Run> createRunTransportSettings =
        GrpcCallSettings.<CreateRunRequest, Run>newBuilder()
            .setMethodDescriptor(createRunMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateRunRequest, Run> updateRunTransportSettings =
        GrpcCallSettings.<UpdateRunRequest, Run>newBuilder()
            .setMethodDescriptor(updateRunMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("run.name", String.valueOf(request.getRun().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetRunRequest, Run> getRunTransportSettings =
        GrpcCallSettings.<GetRunRequest, Run>newBuilder()
            .setMethodDescriptor(getRunMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListRunsRequest, ListRunsResponse> listRunsTransportSettings =
        GrpcCallSettings.<ListRunsRequest, ListRunsResponse>newBuilder()
            .setMethodDescriptor(listRunsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteRunRequest, Operation> deleteRunTransportSettings =
        GrpcCallSettings.<DeleteRunRequest, Operation>newBuilder()
            .setMethodDescriptor(deleteRunMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateLineageEventRequest, LineageEvent> createLineageEventTransportSettings =
        GrpcCallSettings.<CreateLineageEventRequest, LineageEvent>newBuilder()
            .setMethodDescriptor(createLineageEventMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetLineageEventRequest, LineageEvent> getLineageEventTransportSettings =
        GrpcCallSettings.<GetLineageEventRequest, LineageEvent>newBuilder()
            .setMethodDescriptor(getLineageEventMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListLineageEventsRequest, ListLineageEventsResponse>
        listLineageEventsTransportSettings =
            GrpcCallSettings.<ListLineageEventsRequest, ListLineageEventsResponse>newBuilder()
                .setMethodDescriptor(listLineageEventsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<DeleteLineageEventRequest, Empty> deleteLineageEventTransportSettings =
        GrpcCallSettings.<DeleteLineageEventRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteLineageEventMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<SearchLinksRequest, SearchLinksResponse> searchLinksTransportSettings =
        GrpcCallSettings.<SearchLinksRequest, SearchLinksResponse>newBuilder()
            .setMethodDescriptor(searchLinksMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesResponse>
        batchSearchLinkProcessesTransportSettings =
            GrpcCallSettings
                .<BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesResponse>newBuilder()
                .setMethodDescriptor(batchSearchLinkProcessesMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();

    // Combine transport settings with the per-method call settings into callables.
    this.processOpenLineageRunEventCallable =
        callableFactory.createUnaryCallable(
            processOpenLineageRunEventTransportSettings,
            settings.processOpenLineageRunEventSettings(),
            clientContext);
    this.createProcessCallable =
        callableFactory.createUnaryCallable(
            createProcessTransportSettings, settings.createProcessSettings(), clientContext);
    this.updateProcessCallable =
        callableFactory.createUnaryCallable(
            updateProcessTransportSettings, settings.updateProcessSettings(), clientContext);
    this.getProcessCallable =
        callableFactory.createUnaryCallable(
            getProcessTransportSettings, settings.getProcessSettings(), clientContext);
    this.listProcessesCallable =
        callableFactory.createUnaryCallable(
            listProcessesTransportSettings, settings.listProcessesSettings(), clientContext);
    this.listProcessesPagedCallable =
        callableFactory.createPagedCallable(
            listProcessesTransportSettings, settings.listProcessesSettings(), clientContext);
    this.deleteProcessCallable =
        callableFactory.createUnaryCallable(
            deleteProcessTransportSettings, settings.deleteProcessSettings(), clientContext);
    this.deleteProcessOperationCallable =
        callableFactory.createOperationCallable(
            deleteProcessTransportSettings,
            settings.deleteProcessOperationSettings(),
            clientContext,
            operationsStub);
    this.createRunCallable =
        callableFactory.createUnaryCallable(
            createRunTransportSettings, settings.createRunSettings(), clientContext);
    this.updateRunCallable =
        callableFactory.createUnaryCallable(
            updateRunTransportSettings, settings.updateRunSettings(), clientContext);
    this.getRunCallable =
        callableFactory.createUnaryCallable(
            getRunTransportSettings, settings.getRunSettings(), clientContext);
    this.listRunsCallable =
        callableFactory.createUnaryCallable(
            listRunsTransportSettings, settings.listRunsSettings(), clientContext);
    this.listRunsPagedCallable =
        callableFactory.createPagedCallable(
            listRunsTransportSettings, settings.listRunsSettings(), clientContext);
    this.deleteRunCallable =
        callableFactory.createUnaryCallable(
            deleteRunTransportSettings, settings.deleteRunSettings(), clientContext);
    this.deleteRunOperationCallable =
        callableFactory.createOperationCallable(
            deleteRunTransportSettings,
            settings.deleteRunOperationSettings(),
            clientContext,
            operationsStub);
    this.createLineageEventCallable =
        callableFactory.createUnaryCallable(
            createLineageEventTransportSettings,
            settings.createLineageEventSettings(),
            clientContext);
    this.getLineageEventCallable =
        callableFactory.createUnaryCallable(
            getLineageEventTransportSettings, settings.getLineageEventSettings(), clientContext);
    this.listLineageEventsCallable =
        callableFactory.createUnaryCallable(
            listLineageEventsTransportSettings,
            settings.listLineageEventsSettings(),
            clientContext);
    this.listLineageEventsPagedCallable =
        callableFactory.createPagedCallable(
            listLineageEventsTransportSettings,
            settings.listLineageEventsSettings(),
            clientContext);
    this.deleteLineageEventCallable =
        callableFactory.createUnaryCallable(
            deleteLineageEventTransportSettings,
            settings.deleteLineageEventSettings(),
            clientContext);
    this.searchLinksCallable =
        callableFactory.createUnaryCallable(
            searchLinksTransportSettings, settings.searchLinksSettings(), clientContext);
    this.searchLinksPagedCallable =
        callableFactory.createPagedCallable(
            searchLinksTransportSettings, settings.searchLinksSettings(), clientContext);
    this.batchSearchLinkProcessesCallable =
        callableFactory.createUnaryCallable(
            batchSearchLinkProcessesTransportSettings,
            settings.batchSearchLinkProcessesSettings(),
            clientContext);
    this.batchSearchLinkProcessesPagedCallable =
        callableFactory.createPagedCallable(
            batchSearchLinkProcessesTransportSettings,
            settings.batchSearchLinkProcessesSettings(),
            clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  // --- Callable accessors (one per RPC; paged/operation variants where applicable). ---

  @Override
  public UnaryCallable<ProcessOpenLineageRunEventRequest, ProcessOpenLineageRunEventResponse>
      processOpenLineageRunEventCallable() {
    return processOpenLineageRunEventCallable;
  }

  @Override
  public UnaryCallable<CreateProcessRequest, Process> createProcessCallable() {
    return createProcessCallable;
  }

  @Override
  public UnaryCallable<UpdateProcessRequest, Process> updateProcessCallable() {
    return updateProcessCallable;
  }

  @Override
  public UnaryCallable<GetProcessRequest, Process> getProcessCallable() {
    return getProcessCallable;
  }

  @Override
  public UnaryCallable<ListProcessesRequest, ListProcessesResponse> listProcessesCallable() {
    return listProcessesCallable;
  }

  @Override
  public UnaryCallable<ListProcessesRequest, ListProcessesPagedResponse>
      listProcessesPagedCallable() {
    return listProcessesPagedCallable;
  }

  @Override
  public UnaryCallable<DeleteProcessRequest, Operation> deleteProcessCallable() {
    return deleteProcessCallable;
  }

  @Override
  public OperationCallable<DeleteProcessRequest, Empty, OperationMetadata>
      deleteProcessOperationCallable() {
    return deleteProcessOperationCallable;
  }

  @Override
  public UnaryCallable<CreateRunRequest, Run> createRunCallable() {
    return createRunCallable;
  }

  @Override
  public UnaryCallable<UpdateRunRequest, Run> updateRunCallable() {
    return updateRunCallable;
  }

  @Override
  public UnaryCallable<GetRunRequest, Run> getRunCallable() {
    return getRunCallable;
  }

  @Override
  public UnaryCallable<ListRunsRequest, ListRunsResponse> listRunsCallable() {
    return listRunsCallable;
  }

  @Override
  public UnaryCallable<ListRunsRequest, ListRunsPagedResponse> listRunsPagedCallable() {
    return listRunsPagedCallable;
  }

  @Override
  public UnaryCallable<DeleteRunRequest, Operation> deleteRunCallable() {
    return deleteRunCallable;
  }

  @Override
  public OperationCallable<DeleteRunRequest, Empty, OperationMetadata>
      deleteRunOperationCallable() {
    return deleteRunOperationCallable;
  }

  @Override
  public UnaryCallable<CreateLineageEventRequest, LineageEvent> createLineageEventCallable() {
    return createLineageEventCallable;
  }

  @Override
  public UnaryCallable<GetLineageEventRequest, LineageEvent> getLineageEventCallable() {
    return getLineageEventCallable;
  }

  @Override
  public UnaryCallable<ListLineageEventsRequest, ListLineageEventsResponse>
      listLineageEventsCallable() {
    return listLineageEventsCallable;
  }

  @Override
  public UnaryCallable<ListLineageEventsRequest, ListLineageEventsPagedResponse>
      listLineageEventsPagedCallable() {
    return listLineageEventsPagedCallable;
  }

  @Override
  public UnaryCallable<DeleteLineageEventRequest, Empty> deleteLineageEventCallable() {
    return deleteLineageEventCallable;
  }

  @Override
  public UnaryCallable<SearchLinksRequest, SearchLinksResponse> searchLinksCallable() {
    return searchLinksCallable;
  }

  @Override
  public UnaryCallable<SearchLinksRequest, SearchLinksPagedResponse> searchLinksPagedCallable() {
    return searchLinksPagedCallable;
  }

  @Override
  public UnaryCallable<BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesResponse>
      batchSearchLinkProcessesCallable() {
    return batchSearchLinkProcessesCallable;
  }

  @Override
  public UnaryCallable<BatchSearchLinkProcessesRequest, BatchSearchLinkProcessesPagedResponse>
      batchSearchLinkProcessesPagedCallable() {
    return batchSearchLinkProcessesPagedCallable;
  }

  // --- Lifecycle: delegates to the aggregated background resources. ---

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
googleapis/google-cloud-java | 36,796 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/ListEntityTypesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/entity_type.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* The request message for
* [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListEntityTypesRequest}
*/
// NOTE(review): protoc-generated message class ("DO NOT EDIT") — code left
// byte-identical; only review comments added.
public final class ListEntityTypesRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ListEntityTypesRequest)
    ListEntityTypesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListEntityTypesRequest.newBuilder() to construct.
  private ListEntityTypesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor establishes proto3 string defaults ("" rather than null).
  private ListEntityTypesRequest() {
    parent_ = "";
    languageCode_ = "";
    pageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListEntityTypesRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.dialogflow.v2.EntityTypeProto
        .internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.dialogflow.v2.EntityTypeProto
        .internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.dialogflow.v2.ListEntityTypesRequest.class,
            com.google.cloud.dialogflow.v2.ListEntityTypesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;

  // String fields are stored as Object so they can lazily cache either the decoded
  // java.lang.String or the wire-format ByteString; the volatile write plus benign
  // race is the generator's standard caching pattern — do not restructure by hand.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. The agent to list all entity types from.
   * Format: `projects/<Project ID>/agent`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. The agent to list all entity types from.
   * Format: `projects/<Project ID>/agent`.
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int LANGUAGE_CODE_FIELD_NUMBER = 2;

  @SuppressWarnings("serial")
  private volatile java.lang.Object languageCode_ = "";
  /**
   *
   *
   * <pre>
   * Optional. The language used to access language-specific data.
   * If not specified, the agent's default language is used.
   * For more information, see
   * [Multilingual intent and entity
   * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
   * </pre>
   *
   * <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The languageCode.
   */
  @java.lang.Override
  public java.lang.String getLanguageCode() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      languageCode_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The language used to access language-specific data.
   * If not specified, the agent's default language is used.
   * For more information, see
   * [Multilingual intent and entity
   * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
   * </pre>
   *
   * <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for languageCode.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getLanguageCodeBytes() {
    java.lang.Object ref = languageCode_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      languageCode_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  private int pageSize_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. The maximum number of items to return in a single page. By
   * default 100 and at most 1000.
   * </pre>
   *
   * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 4;

  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
  /**
   *
   *
   * <pre>
   * Optional. The next_page_token value returned from a previous list request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. The next_page_token value returned from a previous list request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized (memoized result).
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Proto3 wire format: fields at their default value are skipped entirely.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, languageCode_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Mirrors writeTo field-by-field; result is cached in memoizedSize (-1 = unset).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(languageCode_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, languageCode_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over all four fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2.ListEntityTypesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2.ListEntityTypesRequest other =
        (com.google.cloud.dialogflow.v2.ListEntityTypesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getLanguageCode().equals(other.getLanguageCode())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash is memoized; field numbers are folded in so reordering fields in the
  // .proto changes the hash deterministically.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + LANGUAGE_CODE_FIELD_NUMBER;
    hash = (53 * hash) + getLanguageCode().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with and
  // without an ExtensionRegistryLite. All delegate to the shared PARSER below.
  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factories: newBuilder()/toBuilder() avoid allocating when called on
  // the shared default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.dialogflow.v2.ListEntityTypesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The request message for
   * [EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes].
   * </pre>
   *
   * Protobuf type {@code google.cloud.dialogflow.v2.ListEntityTypesRequest}
   */
  // Mutable builder for ListEntityTypesRequest. Field presence is tracked in
  // bitField0_ (bit 0 = parent, 1 = languageCode, 2 = pageSize, 3 = pageToken);
  // only explicitly-set fields are copied into the message by buildPartial0.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ListEntityTypesRequest)
      com.google.cloud.dialogflow.v2.ListEntityTypesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.dialogflow.v2.EntityTypeProto
          .internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.dialogflow.v2.EntityTypeProto
          .internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.dialogflow.v2.ListEntityTypesRequest.class,
              com.google.cloud.dialogflow.v2.ListEntityTypesRequest.Builder.class);
    }

    // Construct using com.google.cloud.dialogflow.v2.ListEntityTypesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      languageCode_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.dialogflow.v2.EntityTypeProto
          .internal_static_google_cloud_dialogflow_v2_ListEntityTypesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListEntityTypesRequest getDefaultInstanceForType() {
      return com.google.cloud.dialogflow.v2.ListEntityTypesRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListEntityTypesRequest build() {
      com.google.cloud.dialogflow.v2.ListEntityTypesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.dialogflow.v2.ListEntityTypesRequest buildPartial() {
      com.google.cloud.dialogflow.v2.ListEntityTypesRequest result =
          new com.google.cloud.dialogflow.v2.ListEntityTypesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set, leaving the rest at
    // their proto3 defaults from the message constructor.
    private void buildPartial0(com.google.cloud.dialogflow.v2.ListEntityTypesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.languageCode_ = languageCode_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.dialogflow.v2.ListEntityTypesRequest) {
        return mergeFrom((com.google.cloud.dialogflow.v2.ListEntityTypesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge semantics: only non-default fields of `other` overwrite ours.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2.ListEntityTypesRequest other) {
      if (other == com.google.cloud.dialogflow.v2.ListEntityTypesRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getLanguageCode().isEmpty()) {
        languageCode_ = other.languageCode_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parser loop: case labels are the field tags
    // (field_number << 3 | wire_type); unrecognized tags go to unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                languageCode_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The agent to list all entity types from.
     * Format: `projects/<Project ID>/agent`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The agent to list all entity types from.
     * Format: `projects/<Project ID>/agent`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The agent to list all entity types from.
     * Format: `projects/<Project ID>/agent`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The agent to list all entity types from.
     * Format: `projects/<Project ID>/agent`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The agent to list all entity types from.
     * Format: `projects/<Project ID>/agent`.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object languageCode_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The language used to access language-specific data.
     * If not specified, the agent's default language is used.
     * For more information, see
     * [Multilingual intent and entity
     * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
     * </pre>
     *
     * <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The languageCode.
     */
    public java.lang.String getLanguageCode() {
      java.lang.Object ref = languageCode_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        languageCode_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The language used to access language-specific data.
     * If not specified, the agent's default language is used.
     * For more information, see
     * [Multilingual intent and entity
     * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
     * </pre>
     *
     * <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for languageCode.
     */
    public com.google.protobuf.ByteString getLanguageCodeBytes() {
      java.lang.Object ref = languageCode_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        languageCode_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The language used to access language-specific data.
     * If not specified, the agent's default language is used.
     * For more information, see
     * [Multilingual intent and entity
     * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
     * </pre>
     *
     * <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCode(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      languageCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The language used to access language-specific data.
     * If not specified, the agent's default language is used.
     * For more information, see
     * [Multilingual intent and entity
     * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
     * </pre>
     *
     * <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearLanguageCode() {
      languageCode_ = getDefaultInstance().getLanguageCode();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The language used to access language-specific data.
     * If not specified, the agent's default language is used.
     * For more information, see
     * [Multilingual intent and entity
     * data](https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity).
     * </pre>
     *
     * <code>string language_code = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for languageCode to set.
     * @return This builder for chaining.
     */
    public Builder setLanguageCodeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      languageCode_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private int pageSize_;
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of items to return in a single page. By
     * default 100 and at most 1000.
     * </pre>
     *
     * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of items to return in a single page. By
     * default 100 and at most 1000.
     * </pre>
     *
     * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {
      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The maximum number of items to return in a single page. By
     * default 100 and at most 1000.
     * </pre>
     *
     * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. The next_page_token value returned from a previous list request.
     * </pre>
     *
     * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ListEntityTypesRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ListEntityTypesRequest)
  // Shared immutable default instance; acts as the canonical "all defaults" value.
  private static final com.google.cloud.dialogflow.v2.ListEntityTypesRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ListEntityTypesRequest();
  }

  public static com.google.cloud.dialogflow.v2.ListEntityTypesRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser used by all parseFrom overloads; delegates to the Builder's wire-format
  // mergeFrom and attaches the partially-built message to any parse exception.
  private static final com.google.protobuf.Parser<ListEntityTypesRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListEntityTypesRequest>() {
        @java.lang.Override
        public ListEntityTypesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListEntityTypesRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListEntityTypesRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.dialogflow.v2.ListEntityTypesRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/james-project | 37,188 | mailbox/cassandra/src/test/java/org/apache/james/mailbox/cassandra/mail/CassandraMailboxMapperTest.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.mailbox.cassandra.mail;
import static org.apache.james.backends.cassandra.Scenario.Builder.fail;
import static org.apache.james.mailbox.model.MailboxAssertingTool.softly;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.util.stream.IntStream;
import org.apache.commons.lang3.StringUtils;
import org.apache.james.backends.cassandra.CassandraCluster;
import org.apache.james.backends.cassandra.CassandraClusterExtension;
import org.apache.james.backends.cassandra.Scenario;
import org.apache.james.backends.cassandra.components.CassandraDataDefinition;
import org.apache.james.backends.cassandra.init.configuration.CassandraConfiguration;
import org.apache.james.backends.cassandra.versions.CassandraSchemaVersionDAO;
import org.apache.james.backends.cassandra.versions.CassandraSchemaVersionDataDefinition;
import org.apache.james.backends.cassandra.versions.SchemaVersion;
import org.apache.james.core.Username;
import org.apache.james.eventsourcing.eventstore.JsonEventSerializer;
import org.apache.james.eventsourcing.eventstore.cassandra.CassandraEventStore;
import org.apache.james.eventsourcing.eventstore.cassandra.CassandraEventStoreDataDefinition;
import org.apache.james.eventsourcing.eventstore.cassandra.EventStoreDao;
import org.apache.james.junit.categories.Unstable;
import org.apache.james.mailbox.cassandra.ids.CassandraId;
import org.apache.james.mailbox.cassandra.mail.eventsourcing.acl.ACLModule;
import org.apache.james.mailbox.cassandra.modules.CassandraAclDataDefinition;
import org.apache.james.mailbox.cassandra.modules.CassandraMailboxDataDefinition;
import org.apache.james.mailbox.exception.MailboxNotFoundException;
import org.apache.james.mailbox.exception.TooLongMailboxNameException;
import org.apache.james.mailbox.model.Mailbox;
import org.apache.james.mailbox.model.MailboxId;
import org.apache.james.mailbox.model.MailboxPath;
import org.apache.james.mailbox.model.UidValidity;
import org.apache.james.mailbox.model.search.ExactName;
import org.apache.james.mailbox.model.search.MailboxQuery;
import org.apache.james.mailbox.model.search.Wildcard;
import org.apache.james.mailbox.store.MailboxReactorUtils;
import org.assertj.core.api.SoftAssertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import com.github.fge.lambdas.Throwing;
import com.github.fge.lambdas.runnable.ThrowingRunnable;
import reactor.core.publisher.Mono;
/**
 * Integration tests for {@code CassandraMailboxMapper}.
 *
 * <p>The mapper maintains two denormalized Cassandra projections of a mailbox — one
 * keyed by id ({@code mailbox}) and one keyed by path ({@code mailboxpathv3}) — so
 * these tests mostly exercise consistency between the two under injected query
 * failures ({@link Scenario}), verifying retry behaviour, read repairs, and the
 * documented JAMES-3056 inconsistency windows (kept as {@code @Disabled} tests).
 */
class CassandraMailboxMapperTest {
    private static final UidValidity UID_VALIDITY = UidValidity.of(52);
    private static final Username USER = Username.of("user");
    private static final CassandraId MAILBOX_ID = CassandraId.timeBased();
    private static final MailboxPath MAILBOX_PATH = MailboxPath.forUser(USER, "name");
    private static final Mailbox MAILBOX = new Mailbox(MAILBOX_PATH, UID_VALIDITY, MAILBOX_ID);
    private static final String INBOX = "INBOX";
    private static final String INBOX_RENAMED = "INBOX_RENAMED";
    private static final CassandraId MAILBOX_ID_2 = CassandraId.timeBased();
    // NOTE(review): MAILBOX_BIS is unreferenced in this class — candidate for removal,
    // kept here to avoid churn in case an external diff/test references it.
    private static final Mailbox MAILBOX_BIS = new Mailbox(MAILBOX_PATH, UID_VALIDITY, MAILBOX_ID_2);
    private static final CassandraDataDefinition MODULES = CassandraDataDefinition.aggregateModules(
        CassandraAclDataDefinition.MODULE,
        CassandraEventStoreDataDefinition.MODULE(),
        CassandraMailboxDataDefinition.MODULE,
        CassandraSchemaVersionDataDefinition.MODULE);
    // Number of injected failures used where the mapper's retries must be exhausted.
    private static final int TRY_COUNT_BEFORE_FAILURE = 6;

    @RegisterExtension
    static CassandraClusterExtension cassandraCluster = new CassandraClusterExtension(MODULES);

    private CassandraMailboxDAO mailboxDAO;
    private CassandraMailboxPathV3DAO mailboxPathV3DAO;
    private CassandraMailboxMapper testee;
    private CassandraSchemaVersionDAO versionDAO;

    @BeforeEach
    void setUp() {
        CassandraCluster cassandra = cassandraCluster.getCassandraCluster();
        mailboxDAO = new CassandraMailboxDAO(cassandra.getConf(), cassandra.getTypesProvider());
        mailboxPathV3DAO = new CassandraMailboxPathV3DAO(cassandra.getConf());
        versionDAO = new CassandraSchemaVersionDAO(cassandra.getConf());
        // Default to schema version 7; ReadRepairs tests bump this to 8 themselves.
        versionDAO.truncateVersion()
            .then(versionDAO.updateVersion(new SchemaVersion(7)))
            .block();
        setUpTestee(CassandraConfiguration.DEFAULT_CONFIGURATION);
    }

    // Wires the mapper under test together with its ACL/event-store collaborators.
    private void setUpTestee(CassandraConfiguration cassandraConfiguration) {
        CassandraCluster cassandra = cassandraCluster.getCassandraCluster();
        CassandraACLDAOV2 aclDAOv2 = new CassandraACLDAOV2(cassandra.getConf());
        JsonEventSerializer jsonEventSerializer = JsonEventSerializer
            .forModules(ACLModule.ACL_UPDATE)
            .withoutNestedType();
        CassandraUserMailboxRightsDAO usersRightDAO = new CassandraUserMailboxRightsDAO(cassandra.getConf());
        CassandraEventStore eventStore = new CassandraEventStore(new EventStoreDao(cassandra.getConf(), jsonEventSerializer));
        CassandraACLMapper aclMapper = new CassandraACLMapper(
            new CassandraACLMapper.StoreV2(usersRightDAO, aclDAOv2, eventStore),
            CassandraConfiguration.DEFAULT_CONFIGURATION);
        testee = new CassandraMailboxMapper(
            mailboxDAO,
            mailboxPathV3DAO,
            usersRightDAO,
            aclMapper,
            cassandraConfiguration);
    }

    /**
     * Consistency of the id-keyed and path-keyed projections under injected failures.
     */
    @Nested
    class ConsistencyTest {
        private MailboxPath inboxPath;
        private MailboxPath inboxPathRenamed;
        private MailboxQuery.UserBound allMailboxesSearchQuery;
        private MailboxQuery.UserBound inboxSearchQuery;
        private MailboxQuery.UserBound inboxRenamedSearchQuery;

        @BeforeEach
        void setUp() {
            inboxPath = MailboxPath.forUser(USER, INBOX);
            inboxPathRenamed = MailboxPath.forUser(USER, INBOX_RENAMED);
            allMailboxesSearchQuery = MailboxQuery.builder()
                .userAndNamespaceFrom(inboxPath)
                .expression(Wildcard.INSTANCE)
                .build()
                .asUserBound();
            inboxSearchQuery = MailboxQuery.builder()
                .userAndNamespaceFrom(inboxPath)
                .expression(new ExactName(INBOX))
                .build()
                .asUserBound();
            inboxRenamedSearchQuery = MailboxQuery.builder()
                .userAndNamespaceFrom(inboxPathRenamed)
                .expression(new ExactName(INBOX_RENAMED))
                .build()
                .asUserBound();
        }

        /**
         * A single injected failure must be absorbed by the mapper's retry logic:
         * the operation still succeeds and both projections end up consistent.
         */
        @Nested
        class Retries {
            @Test
            void renameShouldRetryFailedDeleteMailboxPath(CassandraCluster cassandra) {
                Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
                MailboxId inboxId = inbox.getMailboxId();
                Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
                // NOTE(review): other tests target the mailboxpathv3 table; confirm the
                // rename path really still issues a DELETE against mailboxpathv2,
                // otherwise this scenario never fires and no retry is exercised.
                cassandra.getConf()
                    .registerScenario(fail()
                        .times(1)
                        .whenQueryStartsWith("DELETE FROM mailboxpathv2"));
                testee.rename(inboxRenamed).block();
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly(softly)
                        .assertThat(testee.findMailboxById(inboxId).block())
                        .isEqualTo(inboxRenamed);
                    softly(softly)
                        .assertThat(testee.findMailboxByPath(inboxPathRenamed).block())
                        .isEqualTo(inboxRenamed);
                    softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                            .collectList().block())
                        .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                            .assertThat(searchMailbox)
                            .isEqualTo(inboxRenamed));
                }));
            }

            @Test
            void renameShouldRetryFailedMailboxSaving(CassandraCluster cassandra) {
                Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
                MailboxId inboxId = inbox.getMailboxId();
                Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
                cassandra.getConf()
                    .registerScenario(fail()
                        .times(1)
                        .whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase)"));
                testee.rename(inboxRenamed).block();
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly(softly)
                        .assertThat(testee.findMailboxById(inboxId).block())
                        .isEqualTo(inboxRenamed);
                    softly(softly)
                        .assertThat(testee.findMailboxByPath(inboxPathRenamed).block())
                        .isEqualTo(inboxRenamed);
                    softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                            .collectList().block())
                        .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                            .assertThat(searchMailbox)
                            .isEqualTo(inboxRenamed));
                }));
            }

            @Test
            void createShouldRetryFailedMailboxSaving(CassandraCluster cassandra) {
                cassandra.getConf()
                    .registerScenario(fail()
                        .times(1)
                        .whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase)"));
                Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly(softly)
                        .assertThat(testee.findMailboxById(inbox.getMailboxId()).block())
                        .isEqualTo(inbox);
                    softly(softly)
                        .assertThat(testee.findMailboxByPath(inboxPath).block())
                        .isEqualTo(inbox);
                    softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                            .collectList().block())
                        .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                            .assertThat(searchMailbox)
                            .isEqualTo(inbox));
                }));
            }

            @Test
            void deleteShouldRetryFailedMailboxDeletion(CassandraCluster cassandra) {
                Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
                cassandra.getConf()
                    .registerScenario(fail()
                        .times(1)
                        .whenQueryStartsWith("DELETE FROM mailbox "));
                testee.delete(inbox).block();
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly.assertThatThrownBy(() -> testee.findMailboxById(inbox.getMailboxId()).block())
                        .hasCauseInstanceOf(MailboxNotFoundException.class);
                    softly.assertThat(testee.findMailboxByPath(inboxPath).blockOptional())
                        .isEmpty();
                    softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                            .collectList().block())
                        .isEmpty();
                }));
            }
        }

        /**
         * Read repairs: when one projection is missing an entry present in the other,
         * repeated reads should (probabilistically) restore consistency — but only in
         * the direction the data model allows.
         */
        @Nested
        class ReadRepairs {
            @BeforeEach
            void setVersion() {
                // Read repairs should not be performed with an outdated data representation
                versionDAO.truncateVersion()
                    .then(versionDAO.updateVersion(new SchemaVersion(8)))
                    .block();
            }

            @Test
            void findMailboxByIdShouldEventuallyFixInconsistencyWhenMailboxIsNotInPath() {
                mailboxDAO.save(MAILBOX)
                    .block();
                // Read repair is probabilistic; 100 reads make triggering it near-certain.
                IntStream.range(0, 100).forEach(i ->
                    testee.findMailboxById(MAILBOX_ID)
                        .onErrorResume(e -> Mono.empty())
                        .block());
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly(softly)
                        .assertThat(testee.findMailboxById(MAILBOX_ID).block())
                        .isEqualTo(MAILBOX);
                    softly(softly)
                        .assertThat(testee.findMailboxByPath(MAILBOX_PATH).block())
                        .isEqualTo(MAILBOX);
                }));
            }

            @Test
            void orphanMailboxIdEntriesCanNotBeReadRepaired() {
                mailboxDAO.save(MAILBOX)
                    .block();
                IntStream.range(0, 100).forEach(i ->
                    testee.findMailboxByPath(MAILBOX_PATH)
                        .onErrorResume(e -> Mono.empty())
                        .block());
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly.assertThat(MailboxReactorUtils.blockOptional(testee.findMailboxByPath(MAILBOX_PATH)))
                        .isEmpty();
                    softly(softly)
                        .assertThat(testee.findMailboxById(MAILBOX_ID).block())
                        .isEqualTo(MAILBOX);
                }));
            }

            @Test
            void orphanPathEntriesCanNotBeRepairedByIdReads() {
                mailboxPathV3DAO.save(MAILBOX)
                    .block();
                IntStream.range(0, 100).forEach(i ->
                    testee.findMailboxById(MAILBOX_ID)
                        .onErrorResume(e -> Mono.empty())
                        .block());
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly.assertThatThrownBy(() -> MailboxReactorUtils.blockOptional(testee.findMailboxById(MAILBOX_ID)))
                        .isInstanceOf(MailboxNotFoundException.class);
                    softly(softly)
                        .assertThat(testee.findMailboxByPath(MAILBOX_PATH).block())
                        .isEqualTo(MAILBOX);
                }));
            }

            @Test
            void findMailboxByPathShouldFixInconsistencyWhenMailboxIsNotReferencedById() {
                mailboxPathV3DAO.save(MAILBOX)
                    .block();
                IntStream.range(0, 100).forEach(i ->
                    testee.findMailboxByPath(MAILBOX_PATH)
                        .onErrorResume(e -> Mono.empty())
                        .block());
                SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                    softly.assertThatThrownBy(() -> MailboxReactorUtils.blockOptional(testee.findMailboxById(MAILBOX_ID)))
                        .isInstanceOf(MailboxNotFoundException.class);
                    softly.assertThat(MailboxReactorUtils.blockOptional(testee.findMailboxByPath(MAILBOX_PATH)))
                        .isEmpty();
                }));
            }
        }

        @Disabled("In order to be more performant mailboxPath V3 table includes the UID_VALIDITY." +
            "Reading paths no longer requires reading the mailbox by id but this of course has a " +
            "consistency cost.")
        @Test
        void createShouldBeConsistentWhenFailToPersistMailbox(CassandraCluster cassandra) {
            cassandra.getConf()
                .registerScenario(fail()
                    .times(10)
                    .whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase)"));
            doQuietly(() -> testee.create(inboxPath, UID_VALIDITY).block());
            SoftAssertions.assertSoftly(softly -> {
                softly.assertThat(testee.findMailboxByPath(inboxPath).blockOptional())
                    .isEmpty();
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .isEmpty();
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .isEmpty();
            });
        }

        @Test
        void renameThenFailToRetrieveMailboxShouldBeConsistentWhenFindByInbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("SELECT id,mailboxbase,uidvalidity,name FROM mailbox"));
            // Fix: the Mono must be subscribed for the rename attempt to actually run;
            // without .block() the scenario was never triggered and the assertions
            // below were vacuously checking an untouched mailbox.
            doQuietly(() -> testee.rename(inboxRenamed).block());
            cassandra.getConf().registerScenario(Scenario.NOTHING);
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly(softly)
                    .assertThat(testee.findMailboxById(inboxId).block())
                    .isEqualTo(inbox);
                softly(softly)
                    .assertThat(testee.findMailboxByPath(inboxPath).block())
                    .isEqualTo(inbox);
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
            }));
        }

        @Disabled("JAMES-3056 returning two mailboxes with same name and id")
        @Test
        void renameThenFailToRetrieveMailboxShouldBeConsistentWhenFindAll(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("SELECT id,mailboxbase,uidvalidity,name FROM mailbox"));
            doQuietly(() -> testee.rename(inboxRenamed).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly ->
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox))
            ));
        }

        @Disabled("JAMES-3056 find by renamed name returns unexpected results")
        @Test
        void renameThenFailToRetrieveMailboxShouldBeConsistentWhenFindByRenamedInbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("SELECT id,mailboxbase,uidvalidity,name FROM mailbox WHERE id=:id"));
            doQuietly(() -> testee.rename(inboxRenamed).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly.assertThatThrownBy(() -> testee.findMailboxByPath(inboxPathRenamed).block())
                    .isInstanceOf(MailboxNotFoundException.class);
                softly.assertThat(testee.findMailboxWithPathLike(inboxRenamedSearchQuery)
                        .collectList().block())
                    .isEmpty();
            }));
        }

        @Test
        void renameThenFailToDeleteMailboxPathShouldBeConsistentWhenFindByInbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("DELETE FROM mailboxpathv3 WHERE namespace=:namespace AND user=:user AND mailboxname=:mailboxname IF EXISTS"));
            doQuietly(() -> testee.rename(inboxRenamed).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly(softly)
                    .assertThat(testee.findMailboxById(inboxId).block())
                    .isEqualTo(inbox);
                softly(softly)
                    .assertThat(testee.findMailboxByPath(inboxPath).block())
                    .isEqualTo(inbox);
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
            }));
        }

        @Disabled("JAMES-3056 returning two mailboxes with same name and id")
        @Test
        void renameThenFailToDeleteMailboxPathShouldBeConsistentWhenFindAll(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("DELETE FROM mailboxpathv3 WHERE namespace=:namespace AND user=:user AND mailboxname=:mailboxname IF EXISTS"));
            doQuietly(() -> testee.rename(inboxRenamed).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly ->
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox))));
        }

        @Disabled("JAMES-3056 find by renamed name returns unexpected results")
        @Test
        void renameThenFailToDeleteMailboxPathShouldBeConsistentWhenFindByRenamedInbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("DELETE FROM mailboxpathv3 WHERE namespace=:namespace AND user=:user AND mailboxname=:mailboxname IF EXISTS"));
            doQuietly(() -> testee.rename(inboxRenamed).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly.assertThatThrownBy(() -> testee.findMailboxByPath(inboxPathRenamed).block())
                    .isInstanceOf(MailboxNotFoundException.class);
                softly.assertThat(testee.findMailboxWithPathLike(inboxRenamedSearchQuery)
                        .collectList().block())
                    .isEmpty();
            }));
        }

        @Disabled("JAMES-3056 find by mailbox name returns unexpected results")
        @Test
        void deleteShouldBeConsistentWhenFailToDeleteMailbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id"));
            doQuietly(() -> testee.delete(inbox).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly.assertThatCode(() -> testee.findMailboxById(inboxId).block())
                    .doesNotThrowAnyException();
                softly.assertThatCode(() -> testee.findMailboxByPath(inboxPath).block())
                    .doesNotThrowAnyException();
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
            }));
        }

        @Disabled("JAMES-3056 org.apache.james.mailbox.exception.MailboxNotFoundException: 'mailboxId' can not be found")
        @Test
        void createAfterPreviousFailedCreateShouldCreateAMailbox(CassandraCluster cassandra) {
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
            doQuietly(() -> testee.create(inboxPath, UID_VALIDITY).block());
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly(softly)
                    .assertThat(testee.findMailboxByPath(inboxPath).block())
                    .isEqualTo(inbox);
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
            }));
        }

        @Test
        /*
        https://builds.apache.org/blue/organizations/jenkins/james%2FApacheJames/detail/PR-268/38/tests
        Expected size:<1> but was:<2> in:
        <[Mailbox{id=44ec7c50-405c-11eb-bd9e-f9735674a69e, namespace=#private, user=Username{localPart=user, domainPart=Optional.empty}, name=INBOX},
            Mailbox{id=4282f660-405c-11eb-bd9e-f9735674a69e, namespace=#private, user=Username{localPart=user, domainPart=Optional.empty}, name=name}]>
        at CassandraMailboxMapperTest$ConsistencyTest.lambda$createAfterPreviousDeleteOnFailedCreateShouldCreateAMailbox$34(CassandraMailboxMapperTest$ConsistencyTest.java:628)
        */
        @Tag(Unstable.TAG)
        void createAfterPreviousDeleteOnFailedCreateShouldCreateAMailbox(CassandraCluster cassandra) {
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("INSERT INTO mailbox (id,name,uidvalidity,mailboxbase) VALUES (:id,:name,:uidvalidity,:mailboxbase)"));
            doQuietly(() -> testee.create(inboxPath, UID_VALIDITY).block());
            doQuietly(() -> testee.delete(new Mailbox(inboxPath, UID_VALIDITY, CassandraId.timeBased())).block());
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly(softly)
                    .assertThat(testee.findMailboxByPath(inboxPath).block())
                    .isEqualTo(inbox);
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inbox));
            }));
        }

        @Test
        void deleteAfterAFailedDeleteShouldDeleteTheMailbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("DELETE FROM mailbox WHERE id=:id"));
            doQuietly(() -> testee.delete(inbox).block());
            doQuietly(() -> testee.delete(inbox).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly.assertThatThrownBy(() -> testee.findMailboxById(inboxId).block())
                    .hasCauseInstanceOf(MailboxNotFoundException.class);
                softly.assertThat(testee.findMailboxByPath(inboxPath).blockOptional())
                    .isEmpty();
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .isEmpty();
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .isEmpty();
            }));
        }

        @Disabled("JAMES-3056 mailbox name is not updated to INBOX_RENAMED; " +
            "findMailboxWithPathLike() returns a list with two same mailboxes")
        @Test
        void renameAfterRenameFailOnRetrieveMailboxShouldRenameTheMailbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("SELECT id,mailboxbase,uidvalidity,name FROM mailbox WHERE id=:id"));
            doQuietly(() -> testee.rename(inboxRenamed).block());
            doQuietly(() -> testee.rename(inboxRenamed).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly(softly)
                    .assertThat(testee.findMailboxById(inboxId).block())
                    .isEqualTo(inboxRenamed);
                softly(softly)
                    .assertThat(testee.findMailboxByPath(inboxPathRenamed).block())
                    .isEqualTo(inboxRenamed);
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .isEmpty();
                softly.assertThat(testee.findMailboxWithPathLike(inboxRenamedSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inboxRenamed));
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inboxRenamed));
            }));
        }

        @Disabled("JAMES-3056 mailbox name is not updated to INBOX_RENAMED")
        @Test
        void renameAfterRenameFailOnDeletePathShouldRenameTheMailbox(CassandraCluster cassandra) {
            Mailbox inbox = testee.create(inboxPath, UID_VALIDITY).block();
            CassandraId inboxId = (CassandraId) inbox.getMailboxId();
            Mailbox inboxRenamed = createInboxRenamedMailbox(inboxId);
            cassandra.getConf()
                .registerScenario(fail()
                    .times(TRY_COUNT_BEFORE_FAILURE)
                    .whenQueryStartsWith("DELETE FROM mailboxpathv3 WHERE namespace=:namespace AND user=:user AND mailboxname=:mailboxname IF EXISTS"));
            doQuietly(() -> testee.rename(inboxRenamed).block());
            doQuietly(() -> testee.rename(inboxRenamed).block());
            SoftAssertions.assertSoftly(Throwing.consumer(softly -> {
                softly(softly)
                    .assertThat(testee.findMailboxById(inboxId).block())
                    .isEqualTo(inboxRenamed);
                softly(softly)
                    .assertThat(testee.findMailboxByPath(inboxPathRenamed).block())
                    .isEqualTo(inboxRenamed);
                softly.assertThat(testee.findMailboxWithPathLike(inboxSearchQuery)
                        .collectList().block())
                    .isEmpty();
                softly.assertThat(testee.findMailboxWithPathLike(inboxRenamedSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox ->
                        softly(softly)
                            .assertThat(searchMailbox)
                            .isEqualTo(inboxRenamed));
                softly.assertThat(testee.findMailboxWithPathLike(allMailboxesSearchQuery)
                        .collectList().block())
                    .hasOnlyOneElementSatisfying(searchMailbox -> softly(softly)
                        .assertThat(searchMailbox)
                        .isEqualTo(inboxRenamed));
            }));
        }

        // Runs the given action, swallowing any failure: used where a test
        // deliberately triggers an operation that is expected to (partially) fail.
        private void doQuietly(ThrowingRunnable runnable) {
            try {
                runnable.run();
            } catch (Throwable th) {
                // ignore
            }
        }

        // Same mailbox id as the original INBOX, but carrying the renamed path.
        private Mailbox createInboxRenamedMailbox(MailboxId mailboxId) {
            return new Mailbox(inboxPathRenamed, UID_VALIDITY, mailboxId);
        }
    }

    @Disabled("JAMES-2514 Cassandra 3 supports long mailbox names. Hence we can not rely on this for failing")
    @Test
    void renameShouldNotRemoveOldMailboxPathWhenCreatingTheNewMailboxPathFails() {
        testee.create(MAILBOX_PATH, UID_VALIDITY).block();
        Mailbox mailbox = testee.findMailboxByPath(MAILBOX_PATH).block();
        Mailbox newMailbox = new Mailbox(tooLongMailboxPath(mailbox.generateAssociatedPath()), UID_VALIDITY, mailbox.getMailboxId());
        assertThatThrownBy(() -> testee.rename(newMailbox).block())
            .isInstanceOf(TooLongMailboxNameException.class);
        assertThat(mailboxPathV3DAO.retrieve(MAILBOX_PATH).blockOptional())
            .isPresent();
    }

    // Builds a path whose name exceeds any sane mailbox-name limit (65537 chars).
    private MailboxPath tooLongMailboxPath(MailboxPath fromMailboxPath) {
        return new MailboxPath(fromMailboxPath, StringUtils.repeat("b", 65537));
    }
}
|
apache/tez | 37,113 | tez-api/src/main/java/org/apache/tez/dag/api/DagTypeConverters.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tez.dag.api;
import java.io.DataOutputStream;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.zip.Inflater;
import javax.annotation.Nullable;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.tez.client.CallerContext;
import org.apache.tez.client.TezAppMasterStatus;
import org.apache.tez.common.TezCommonUtils;
import org.apache.tez.common.counters.CounterGroup;
import org.apache.tez.common.counters.TezCounter;
import org.apache.tez.common.counters.TezCounters;
import org.apache.tez.common.security.DAGAccessControls;
import org.apache.tez.dag.api.EdgeProperty.DataMovementType;
import org.apache.tez.dag.api.EdgeProperty.DataSourceType;
import org.apache.tez.dag.api.EdgeProperty.SchedulingType;
import org.apache.tez.dag.api.Vertex.VertexExecutionContext;
import org.apache.tez.dag.api.client.StatusGetOpts;
import org.apache.tez.dag.api.client.rpc.DAGClientAMProtocolRPC.TezAppMasterStatusProto;
import org.apache.tez.dag.api.records.DAGProtos;
import org.apache.tez.dag.api.records.DAGProtos.ACLInfo;
import org.apache.tez.dag.api.records.DAGProtos.AMPluginDescriptorProto;
import org.apache.tez.dag.api.records.DAGProtos.CallerContextProto;
import org.apache.tez.dag.api.records.DAGProtos.ConfigurationProto;
import org.apache.tez.dag.api.records.DAGProtos.EdgePlan;
import org.apache.tez.dag.api.records.DAGProtos.PlanEdgeDataMovementType;
import org.apache.tez.dag.api.records.DAGProtos.PlanEdgeDataSourceType;
import org.apache.tez.dag.api.records.DAGProtos.PlanEdgeProperty;
import org.apache.tez.dag.api.records.DAGProtos.PlanEdgeSchedulingType;
import org.apache.tez.dag.api.records.DAGProtos.PlanKeyValuePair;
import org.apache.tez.dag.api.records.DAGProtos.PlanLocalResource;
import org.apache.tez.dag.api.records.DAGProtos.PlanLocalResourceType;
import org.apache.tez.dag.api.records.DAGProtos.PlanLocalResourceVisibility;
import org.apache.tez.dag.api.records.DAGProtos.PlanLocalResourcesProto;
import org.apache.tez.dag.api.records.DAGProtos.PlanTaskConfiguration;
import org.apache.tez.dag.api.records.DAGProtos.PlanTaskLocationHint;
import org.apache.tez.dag.api.records.DAGProtos.RootInputLeafOutputProto;
import org.apache.tez.dag.api.records.DAGProtos.TezCounterGroupProto;
import org.apache.tez.dag.api.records.DAGProtos.TezCounterProto;
import org.apache.tez.dag.api.records.DAGProtos.TezCountersProto;
import org.apache.tez.dag.api.records.DAGProtos.TezEntityDescriptorProto;
import org.apache.tez.dag.api.records.DAGProtos.TezNamedEntityDescriptorProto;
import org.apache.tez.dag.api.records.DAGProtos.VertexExecutionContextProto;
import org.apache.tez.dag.api.records.DAGProtos.VertexLocationHintProto;
import org.apache.tez.serviceplugins.api.ServicePluginsDescriptor;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
import com.google.protobuf.ByteString.Output;
/**
 * Stateless converters between user-facing Tez API objects (descriptors, edge and vertex
 * properties, counters, credentials, location hints, caller context, ACLs) and their
 * protobuf wire representations defined in {@link DAGProtos}.
 *
 * <p>All methods are pure functions on their arguments. Unrecognized enum values are
 * rejected with {@link IllegalArgumentException} (an unchecked exception, so existing
 * callers that caught {@link RuntimeException} are unaffected).
 */
@Private
public final class DagTypeConverters {

  private DagTypeConverters() {
    // Utility class: no instances.
  }

  /** Maps a YARN {@link LocalResourceVisibility} to its DAG-plan proto equivalent. */
  public static PlanLocalResourceVisibility convertToDAGPlan(LocalResourceVisibility visibility) {
    switch (visibility) {
      case PUBLIC: return PlanLocalResourceVisibility.PUBLIC;
      case PRIVATE: return PlanLocalResourceVisibility.PRIVATE;
      case APPLICATION: return PlanLocalResourceVisibility.APPLICATION;
      default: throw new IllegalArgumentException("unknown 'visibility': " + visibility);
    }
  }

  /**
   * Converts a name-to-{@link LocalResource} map into a list of DAG-plan local resource
   * protos. Delegates per-entry conversion to
   * {@link #convertLocalResourceToPlanLocalResource(String, LocalResource)}.
   */
  public static List<PlanLocalResource> convertToDAGPlan(Map<String, LocalResource> lrs) {
    List<PlanLocalResource> planLrs = Lists.newArrayListWithCapacity(lrs.size());
    for (Entry<String, LocalResource> entry : lrs.entrySet()) {
      planLrs.add(convertLocalResourceToPlanLocalResource(entry.getKey(), entry.getValue()));
    }
    return planLrs;
  }

  /** Maps a DAG-plan visibility proto back to the YARN {@link LocalResourceVisibility}. */
  public static LocalResourceVisibility convertFromDAGPlan(PlanLocalResourceVisibility visibility) {
    switch (visibility) {
      case PUBLIC: return LocalResourceVisibility.PUBLIC;
      case PRIVATE: return LocalResourceVisibility.PRIVATE;
      case APPLICATION: return LocalResourceVisibility.APPLICATION;
      default: throw new IllegalArgumentException("unknown 'visibility': " + visibility);
    }
  }

  /** Maps an edge {@link DataSourceType} to its DAG-plan proto equivalent. */
  public static PlanEdgeDataSourceType convertToDAGPlan(DataSourceType sourceType) {
    switch (sourceType) {
      case PERSISTED: return PlanEdgeDataSourceType.PERSISTED;
      case PERSISTED_RELIABLE: return PlanEdgeDataSourceType.PERSISTED_RELIABLE;
      case EPHEMERAL: return PlanEdgeDataSourceType.EPHEMERAL;
      default: throw new IllegalArgumentException("unknown 'dataSourceType': " + sourceType);
    }
  }

  /** Maps a DAG-plan data source proto back to the API {@link DataSourceType}. */
  public static DataSourceType convertFromDAGPlan(PlanEdgeDataSourceType sourceType) {
    switch (sourceType) {
      case PERSISTED: return DataSourceType.PERSISTED;
      case PERSISTED_RELIABLE: return DataSourceType.PERSISTED_RELIABLE;
      case EPHEMERAL: return DataSourceType.EPHEMERAL;
      default: throw new IllegalArgumentException("unknown 'dataSourceType': " + sourceType);
    }
  }

  /** Maps an edge {@link DataMovementType} to its DAG-plan proto equivalent. */
  public static PlanEdgeDataMovementType convertToDAGPlan(DataMovementType type) {
    switch (type) {
      case ONE_TO_ONE: return PlanEdgeDataMovementType.ONE_TO_ONE;
      case BROADCAST: return PlanEdgeDataMovementType.BROADCAST;
      case SCATTER_GATHER: return PlanEdgeDataMovementType.SCATTER_GATHER;
      case CUSTOM: return PlanEdgeDataMovementType.CUSTOM;
      default: throw new IllegalArgumentException("unknown 'dataMovementType': " + type);
    }
  }

  /** Maps a DAG-plan data movement proto back to the API {@link DataMovementType}. */
  public static DataMovementType convertFromDAGPlan(PlanEdgeDataMovementType type) {
    switch (type) {
      case ONE_TO_ONE: return DataMovementType.ONE_TO_ONE;
      case BROADCAST: return DataMovementType.BROADCAST;
      case SCATTER_GATHER: return DataMovementType.SCATTER_GATHER;
      case CUSTOM: return DataMovementType.CUSTOM;
      default: throw new IllegalArgumentException("unknown 'dataMovementType': " + type);
    }
  }

  /** Maps an edge {@link SchedulingType} to its DAG-plan proto equivalent. */
  public static PlanEdgeSchedulingType convertToDAGPlan(SchedulingType type) {
    switch (type) {
      case SEQUENTIAL: return PlanEdgeSchedulingType.SEQUENTIAL;
      case CONCURRENT: return PlanEdgeSchedulingType.CONCURRENT;
      default: throw new IllegalArgumentException("unknown 'SchedulingType': " + type);
    }
  }

  /** Maps a DAG-plan scheduling proto back to the API {@link SchedulingType}. */
  public static SchedulingType convertFromDAGPlan(PlanEdgeSchedulingType type) {
    switch (type) {
      case SEQUENTIAL: return SchedulingType.SEQUENTIAL;
      case CONCURRENT: return SchedulingType.CONCURRENT;
      default: throw new IllegalArgumentException("unknown 'SchedulingType': " + type);
    }
  }

  /** Maps a YARN {@link LocalResourceType} to its DAG-plan proto equivalent. */
  public static PlanLocalResourceType convertToDAGPlan(LocalResourceType type) {
    switch (type) {
      case ARCHIVE: return PlanLocalResourceType.ARCHIVE;
      case FILE: return PlanLocalResourceType.FILE;
      case PATTERN: return PlanLocalResourceType.PATTERN;
      default: throw new IllegalArgumentException("unknown 'type': " + type);
    }
  }

  /** Maps a DAG-plan resource type proto back to the YARN {@link LocalResourceType}. */
  public static LocalResourceType convertFromDAGPlan(PlanLocalResourceType type) {
    switch (type) {
      case ARCHIVE: return LocalResourceType.ARCHIVE;
      case FILE: return LocalResourceType.FILE;
      case PATTERN: return LocalResourceType.PATTERN;
      default: throw new IllegalArgumentException("unknown 'type': " + type);
    }
  }

  /**
   * Shared helper: converts DAG-plan task location hint protos to API
   * {@link TaskLocationHint}s. Used by both the bare-list and proto-message entry points.
   */
  private static List<TaskLocationHint> convertTaskLocationHints(
      List<PlanTaskLocationHint> planHints, int sizeHint) {
    List<TaskLocationHint> hints = new ArrayList<TaskLocationHint>(sizeHint);
    for (PlanTaskLocationHint planHint : planHints) {
      hints.add(TaskLocationHint.createTaskLocationHint(
          new HashSet<String>(planHint.getHostList()),
          new HashSet<String>(planHint.getRackList())));
    }
    return hints;
  }

  /** Builds a {@link VertexLocationHint} from a list of DAG-plan task location hints. */
  public static VertexLocationHint convertFromDAGPlan(
      List<PlanTaskLocationHint> locationHints) {
    return VertexLocationHint.create(
        convertTaskLocationHints(locationHints, locationHints.size()));
  }

  /**
   * Converts a YARN {@link URL} to its path-string form for the DAG plan.
   *
   * @throws TezUncheckedException if the URL cannot be translated to a {@link Path}
   */
  public static String convertToDAGPlan(URL resource) {
    try {
      return ConverterUtils.getPathFromYarnURL(resource).toString();
    } catch (URISyntaxException e) {
      // Preserve the cause so the malformed component is visible in the stack trace.
      throw new TezUncheckedException(
          "Unable to translate resource: " + resource + " to Path", e);
    }
  }

  /** Converts a path string back into a YARN {@link URL}. */
  public static URL convertToYarnURL(String pathString) {
    Path path = new Path(pathString);
    return ConverterUtils.getYarnUrlFromPath(path);
  }

  /** Rebuilds a name-to-{@link LocalResource} map from DAG-plan local resource protos. */
  public static Map<String, LocalResource> createLocalResourceMapFromDAGPlan(
      List<PlanLocalResource> localResourcesList) {
    Map<String, LocalResource> map = new HashMap<String, LocalResource>();
    for (PlanLocalResource res : localResourcesList) {
      LocalResource r = new LocalResourcePBImpl();
      // NOTE: every optional protobuf field must be presence-checked before access,
      // otherwise a default value (e.g. "") is silently returned.
      if (res.hasPattern()) {
        r.setPattern(res.getPattern());
      }
      r.setResource(convertToYarnURL(res.getUri()));
      r.setSize(res.getSize());
      r.setTimestamp(res.getTimeStamp());
      r.setType(DagTypeConverters.convertFromDAGPlan(res.getType()));
      r.setVisibility(DagTypeConverters.convertFromDAGPlan(res.getVisibility()));
      map.put(res.getName(), r);
    }
    return map;
  }

  /** Converts a list of key/value pairs into an environment map. */
  public static Map<String, String> createEnvironmentMapFromDAGPlan(
      List<PlanKeyValuePair> environmentSettingList) {
    Map<String, String> map = new HashMap<String, String>();
    for (PlanKeyValuePair setting : environmentSettingList) {
      map.put(setting.getKey(), setting.getValue());
    }
    return map;
  }

  /** Indexes a list of {@link EdgePlan}s by their edge id. */
  public static Map<String, EdgePlan> createEdgePlanMapFromDAGPlan(List<EdgePlan> edgeList) {
    Map<String, EdgePlan> edgePlanMap = new HashMap<String, EdgePlan>();
    for (EdgePlan edgePlanItem : edgeList) {
      edgePlanMap.put(edgePlanItem.getId(), edgePlanItem);
    }
    return edgePlanMap;
  }

  /** Serializes an {@link EdgeProperty} (including an optional edge manager) to proto. */
  public static PlanEdgeProperty convertToProto(EdgeProperty prop) {
    PlanEdgeProperty.Builder edgePropBuilder = PlanEdgeProperty.newBuilder();
    edgePropBuilder.setDataMovementType(convertToDAGPlan(prop.getDataMovementType()));
    edgePropBuilder.setDataSourceType(convertToDAGPlan(prop.getDataSourceType()));
    edgePropBuilder.setSchedulingType(convertToDAGPlan(prop.getSchedulingType()));
    edgePropBuilder.setEdgeSource(DagTypeConverters.convertToDAGPlan(prop.getEdgeSource()));
    edgePropBuilder
        .setEdgeDestination(DagTypeConverters.convertToDAGPlan(prop.getEdgeDestination()));
    if (prop.getEdgeManagerDescriptor() != null) {
      edgePropBuilder.setEdgeManager(DagTypeConverters.convertToDAGPlan(prop
          .getEdgeManagerDescriptor()));
    }
    return edgePropBuilder.build();
  }

  /** Deserializes a {@link PlanEdgeProperty} proto, including any custom edge manager. */
  public static EdgeProperty convertFromProto(PlanEdgeProperty edge) {
    return EdgeProperty.create(
        (edge.hasEdgeManager() ?
            convertEdgeManagerPluginDescriptorFromDAGPlan(edge.getEdgeManager()) : null),
        convertFromDAGPlan(edge.getDataMovementType()),
        convertFromDAGPlan(edge.getDataSourceType()),
        convertFromDAGPlan(edge.getSchedulingType()),
        convertOutputDescriptorFromDAGPlan(edge.getEdgeSource()),
        convertInputDescriptorFromDAGPlan(edge.getEdgeDestination())
    );
  }

  /**
   * Builds an {@link EdgeProperty} from an {@link EdgePlan}. Only CUSTOM data movement
   * carries an edge manager descriptor; the overload used differs accordingly.
   */
  public static EdgeProperty createEdgePropertyMapFromDAGPlan(EdgePlan edge) {
    if (edge.getDataMovementType() == PlanEdgeDataMovementType.CUSTOM) {
      return EdgeProperty.create(
          (edge.hasEdgeManager() ?
              convertEdgeManagerPluginDescriptorFromDAGPlan(edge.getEdgeManager()) : null),
          convertFromDAGPlan(edge.getDataSourceType()),
          convertFromDAGPlan(edge.getSchedulingType()),
          convertOutputDescriptorFromDAGPlan(edge.getEdgeSource()),
          convertInputDescriptorFromDAGPlan(edge.getEdgeDestination())
      );
    } else {
      return EdgeProperty.create(
          convertFromDAGPlan(edge.getDataMovementType()),
          convertFromDAGPlan(edge.getDataSourceType()),
          convertFromDAGPlan(edge.getSchedulingType()),
          convertOutputDescriptorFromDAGPlan(edge.getEdgeSource()),
          convertInputDescriptorFromDAGPlan(edge.getEdgeDestination())
      );
    }
  }

  /** Builds a YARN {@link Resource} request (memory MB + vcores) from a task config. */
  public static Resource createResourceRequestFromTaskConfig(
      PlanTaskConfiguration taskConfig) {
    return Resource.newInstance(taskConfig.getMemoryMb(), taskConfig.getVirtualCores());
  }

  /** Flattens a {@link ConfigurationProto} into a plain key/value map. */
  public static Map<String, String> convertConfFromProto(
      ConfigurationProto confProto) {
    Map<String, String> map = new HashMap<String, String>();
    for (PlanKeyValuePair setting : confProto.getConfKeyValuesList()) {
      map.put(setting.getKey(), setting.getValue());
    }
    return map;
  }

  /**
   * Serializes an {@link EntityDescriptor} (class name, user payload, optional history
   * text) to proto. History text is compressed before embedding.
   *
   * @throws TezUncheckedException if the history text cannot be compressed
   */
  public static TezEntityDescriptorProto convertToDAGPlan(
      EntityDescriptor<?> descriptor) {
    TezEntityDescriptorProto.Builder builder = TezEntityDescriptorProto.newBuilder();
    builder.setClassName(descriptor.getClassName());

    UserPayload userPayload = descriptor.getUserPayload();
    if (userPayload != null) {
      // A payload wrapper may exist without bytes; encode presence faithfully.
      DAGProtos.TezUserPayloadProto.Builder payloadBuilder =
          DAGProtos.TezUserPayloadProto.newBuilder();
      if (userPayload.hasPayload()) {
        payloadBuilder.setUserPayload(ByteString.copyFrom(userPayload.getPayload()));
        payloadBuilder.setVersion(userPayload.getVersion());
      }
      builder.setTezUserPayload(payloadBuilder.build());
    }
    if (descriptor.getHistoryText() != null) {
      try {
        builder.setHistoryText(TezCommonUtils.compressByteArrayToByteString(
            descriptor.getHistoryText().getBytes("UTF-8")));
      } catch (IOException e) {
        throw new TezUncheckedException(e);
      }
    }
    return builder.build();
  }

  /**
   * Decompresses the history text of a descriptor proto, or returns {@code null} if the
   * proto carries none.
   *
   * @param inflater reusable {@link Inflater} supplied by the caller
   * @throws TezUncheckedException if decompression fails
   */
  public static String getHistoryTextFromProto(TezEntityDescriptorProto proto, Inflater inflater) {
    if (!proto.hasHistoryText()) {
      return null;
    }
    try {
      return new String(
          TezCommonUtils.decompressByteStringToByteArray(proto.getHistoryText(), inflater),
          "UTF-8");
    } catch (IOException e) {
      throw new TezUncheckedException(e);
    }
  }

  /** Serializes a root input / leaf output binding (name, IO and controller descriptors). */
  public static RootInputLeafOutputProto convertToDAGPlan(
      RootInputLeafOutput<? extends EntityDescriptor<?>, ? extends EntityDescriptor<?>> rootIO) {
    RootInputLeafOutputProto.Builder builder = RootInputLeafOutputProto.newBuilder();
    builder.setName(rootIO.getName());
    builder.setIODescriptor(convertToDAGPlan(rootIO.getIODescriptor()));
    if (rootIO.getControllerDescriptor() != null) {
      builder.setControllerDescriptor(convertToDAGPlan(rootIO.getControllerDescriptor()));
    }
    return builder.build();
  }

  /**
   * Extracts the {@link UserPayload} from a descriptor proto. Returns {@code null} when no
   * payload wrapper is present, and an empty payload when the wrapper exists without bytes
   * (mirroring {@link #convertToDAGPlan(EntityDescriptor)}).
   */
  private static UserPayload convertTezUserPayloadFromDAGPlan(
      TezEntityDescriptorProto proto) {
    UserPayload userPayload = null;
    if (proto.hasTezUserPayload()) {
      if (proto.getTezUserPayload().hasUserPayload()) {
        userPayload = UserPayload.create(
            proto.getTezUserPayload().getUserPayload().asReadOnlyByteBuffer(),
            proto.getTezUserPayload().getVersion());
      } else {
        userPayload = UserPayload.create(null);
      }
    }
    return userPayload;
  }

  /** Applies a payload to an entity only when one was present in the proto. */
  private static void setUserPayload(EntityDescriptor<?> entity, UserPayload payload) {
    if (payload != null) {
      entity.setUserPayload(payload);
    }
  }

  /** Rebuilds an {@link InputDescriptor} from its proto form. */
  public static InputDescriptor convertInputDescriptorFromDAGPlan(
      TezEntityDescriptorProto proto) {
    InputDescriptor id = InputDescriptor.create(proto.getClassName());
    setUserPayload(id, convertTezUserPayloadFromDAGPlan(proto));
    return id;
  }

  /** Rebuilds an {@link OutputDescriptor} from its proto form. */
  public static OutputDescriptor convertOutputDescriptorFromDAGPlan(
      TezEntityDescriptorProto proto) {
    OutputDescriptor od = OutputDescriptor.create(proto.getClassName());
    setUserPayload(od, convertTezUserPayloadFromDAGPlan(proto));
    return od;
  }

  /** Rebuilds a {@link NamedEntityDescriptor} (name + class + payload) from its proto form. */
  public static NamedEntityDescriptor convertNamedDescriptorFromProto(
      TezNamedEntityDescriptorProto proto) {
    NamedEntityDescriptor descriptor = new NamedEntityDescriptor(
        proto.getName(), proto.getEntityDescriptor().getClassName());
    setUserPayload(descriptor, convertTezUserPayloadFromDAGPlan(proto.getEntityDescriptor()));
    return descriptor;
  }

  /** Rebuilds an {@link InputInitializerDescriptor} from its proto form. */
  public static InputInitializerDescriptor convertInputInitializerDescriptorFromDAGPlan(
      TezEntityDescriptorProto proto) {
    InputInitializerDescriptor iid = InputInitializerDescriptor.create(proto.getClassName());
    setUserPayload(iid, convertTezUserPayloadFromDAGPlan(proto));
    return iid;
  }

  /** Rebuilds an {@link OutputCommitterDescriptor} from its proto form. */
  public static OutputCommitterDescriptor convertOutputCommitterDescriptorFromDAGPlan(
      TezEntityDescriptorProto proto) {
    OutputCommitterDescriptor ocd = OutputCommitterDescriptor.create(proto.getClassName());
    setUserPayload(ocd, convertTezUserPayloadFromDAGPlan(proto));
    return ocd;
  }

  /** Rebuilds a {@link VertexManagerPluginDescriptor} from its proto form. */
  public static VertexManagerPluginDescriptor convertVertexManagerPluginDescriptorFromDAGPlan(
      TezEntityDescriptorProto proto) {
    VertexManagerPluginDescriptor vmpd = VertexManagerPluginDescriptor.create(proto.getClassName());
    setUserPayload(vmpd, convertTezUserPayloadFromDAGPlan(proto));
    return vmpd;
  }

  /** Rebuilds an {@link EdgeManagerPluginDescriptor} from its proto form. */
  public static EdgeManagerPluginDescriptor convertEdgeManagerPluginDescriptorFromDAGPlan(
      TezEntityDescriptorProto proto) {
    EdgeManagerPluginDescriptor empd = EdgeManagerPluginDescriptor.create(proto.getClassName());
    setUserPayload(empd, convertTezUserPayloadFromDAGPlan(proto));
    return empd;
  }

  /** Rebuilds a {@link ProcessorDescriptor} from its proto form. */
  public static ProcessorDescriptor convertProcessorDescriptorFromDAGPlan(
      TezEntityDescriptorProto proto) {
    ProcessorDescriptor pd = ProcessorDescriptor.create(proto.getClassName());
    setUserPayload(pd, convertTezUserPayloadFromDAGPlan(proto));
    return pd;
  }

  /** Maps an AM status proto to the client-facing {@link TezAppMasterStatus}. */
  public static TezAppMasterStatus convertTezAppMasterStatusFromProto(
      TezAppMasterStatusProto proto) {
    switch (proto) {
    case INITIALIZING:
      return TezAppMasterStatus.INITIALIZING;
    case READY:
      return TezAppMasterStatus.READY;
    case RUNNING:
      return TezAppMasterStatus.RUNNING;
    case SHUTDOWN:
      return TezAppMasterStatus.SHUTDOWN;
    }
    // Include the unmatched value; the old message also named the wrong type
    // (TezSessionStatus).
    throw new TezUncheckedException(
        "Could not convert to TezAppMasterStatus from proto: " + proto);
  }

  /** Maps a client-facing {@link TezAppMasterStatus} to its proto form. */
  public static TezAppMasterStatusProto convertTezAppMasterStatusToProto(
      TezAppMasterStatus status) {
    switch (status) {
    case INITIALIZING:
      return TezAppMasterStatusProto.INITIALIZING;
    case READY:
      return TezAppMasterStatusProto.READY;
    case RUNNING:
      return TezAppMasterStatusProto.RUNNING;
    case SHUTDOWN:
      return TezAppMasterStatusProto.SHUTDOWN;
    }
    throw new TezUncheckedException(
        "Could not convert TezAppMasterStatus to proto: " + status);
  }

  /** Serializes a name-to-{@link LocalResource} map into a {@link PlanLocalResourcesProto}. */
  public static PlanLocalResourcesProto convertFromLocalResources(
      Map<String, LocalResource> localResources) {
    PlanLocalResourcesProto.Builder builder = PlanLocalResourcesProto.newBuilder();
    for (Map.Entry<String, LocalResource> entry : localResources.entrySet()) {
      builder.addLocalResources(
          convertLocalResourceToPlanLocalResource(entry.getKey(), entry.getValue()));
    }
    return builder.build();
  }

  /** Rebuilds a name-to-{@link LocalResource} map from a {@link PlanLocalResourcesProto}. */
  public static Map<String, LocalResource> convertFromPlanLocalResources(
      PlanLocalResourcesProto proto) {
    Map<String, LocalResource> localResources =
        new HashMap<String, LocalResource>(proto.getLocalResourcesCount());
    for (PlanLocalResource plr : proto.getLocalResourcesList()) {
      localResources.put(plr.getName(), convertPlanLocalResourceToLocalResource(plr));
    }
    return localResources;
  }

  /**
   * Converts one named {@link LocalResource} to its DAG-plan proto.
   *
   * @throws TezUncheckedException if the resource is of type PATTERN but carries no pattern
   */
  public static PlanLocalResource convertLocalResourceToPlanLocalResource(
      String name, LocalResource lr) {
    PlanLocalResource.Builder localResourcesBuilder = PlanLocalResource.newBuilder();
    localResourcesBuilder.setName(name);
    localResourcesBuilder.setUri(
        DagTypeConverters.convertToDAGPlan(lr.getResource()));
    localResourcesBuilder.setSize(lr.getSize());
    localResourcesBuilder.setTimeStamp(lr.getTimestamp());
    localResourcesBuilder.setType(
        DagTypeConverters.convertToDAGPlan(lr.getType()));
    localResourcesBuilder.setVisibility(
        DagTypeConverters.convertToDAGPlan(lr.getVisibility()));
    if (lr.getType() == LocalResourceType.PATTERN) {
      if (lr.getPattern() == null || lr.getPattern().isEmpty()) {
        throw new TezUncheckedException("LocalResource type set to pattern"
            + " but pattern is null or empty");
      }
      localResourcesBuilder.setPattern(lr.getPattern());
    }
    return localResourcesBuilder.build();
  }

  /** Converts a DAG-plan local resource proto back to a YARN {@link LocalResource}. */
  public static LocalResource convertPlanLocalResourceToLocalResource(
      PlanLocalResource plr) {
    return LocalResource.newInstance(
        ConverterUtils.getYarnUrlFromPath(new Path(plr.getUri())),
        DagTypeConverters.convertFromDAGPlan(plr.getType()),
        DagTypeConverters.convertFromDAGPlan(plr.getVisibility()),
        plr.getSize(), plr.getTimeStamp(),
        plr.hasPattern() ? plr.getPattern() : null);
  }

  /** Rebuilds {@link TezCounters} (groups and counter values) from their proto form. */
  public static TezCounters convertTezCountersFromProto(TezCountersProto proto) {
    TezCounters counters = new TezCounters();
    for (TezCounterGroupProto counterGroupProto : proto.getCounterGroupsList()) {
      CounterGroup group = counters.addGroup(counterGroupProto.getName(),
          counterGroupProto.getDisplayName());
      for (TezCounterProto counterProto : counterGroupProto.getCountersList()) {
        // findCounter creates the counter if absent.
        group.findCounter(counterProto.getName(), counterProto.getDisplayName())
            .setValue(counterProto.getValue());
      }
    }
    return counters;
  }

  /** Serializes {@link TezCounters} to proto, preserving group and counter order. */
  public static TezCountersProto convertTezCountersToProto(
      TezCounters counters) {
    TezCountersProto.Builder builder = TezCountersProto.newBuilder();
    Iterator<CounterGroup> groupIterator = counters.iterator();
    while (groupIterator.hasNext()) {
      CounterGroup counterGroup = groupIterator.next();
      TezCounterGroupProto.Builder groupBuilder = TezCounterGroupProto.newBuilder()
          .setName(counterGroup.getName())
          .setDisplayName(counterGroup.getDisplayName());
      Iterator<TezCounter> counterIterator = counterGroup.iterator();
      while (counterIterator.hasNext()) {
        TezCounter counter = counterIterator.next();
        // Plain append; the previous explicit index bookkeeping was equivalent.
        groupBuilder.addCounters(TezCounterProto.newBuilder()
            .setName(counter.getName())
            .setDisplayName(counter.getDisplayName())
            .setValue(counter.getValue())
            .build());
      }
      builder.addCounterGroups(groupBuilder.build());
    }
    return builder.build();
  }

  /** Maps a single {@link StatusGetOpts} to its proto form. */
  public static DAGProtos.StatusGetOptsProto convertStatusGetOptsToProto(
      StatusGetOpts statusGetOpts) {
    switch (statusGetOpts) {
    case GET_COUNTERS:
      return DAGProtos.StatusGetOptsProto.GET_COUNTERS;
    case GET_MEMORY_USAGE:
      return DAGProtos.StatusGetOptsProto.GET_MEMORY_USAGE;
    }
    throw new TezUncheckedException(
        "Could not convert StatusGetOpts to proto: " + statusGetOpts);
  }

  /** Maps a single status-get-opts proto back to {@link StatusGetOpts}. */
  public static StatusGetOpts convertStatusGetOptsFromProto(DAGProtos.StatusGetOptsProto proto) {
    switch (proto) {
    case GET_COUNTERS:
      return StatusGetOpts.GET_COUNTERS;
    case GET_MEMORY_USAGE:
      return StatusGetOpts.GET_MEMORY_USAGE;
    }
    throw new TezUncheckedException(
        "Could not convert to StatusGetOpts from proto: " + proto);
  }

  /** Maps a set of {@link StatusGetOpts} to a list of protos. */
  public static List<DAGProtos.StatusGetOptsProto> convertStatusGetOptsToProto(
      Set<StatusGetOpts> statusGetOpts) {
    List<DAGProtos.StatusGetOptsProto> protos =
        new ArrayList<DAGProtos.StatusGetOptsProto>(statusGetOpts.size());
    for (StatusGetOpts opt : statusGetOpts) {
      protos.add(convertStatusGetOptsToProto(opt));
    }
    return protos;
  }

  /** Maps a list of status-get-opts protos back to a sorted set of {@link StatusGetOpts}. */
  public static Set<StatusGetOpts> convertStatusGetOptsFromProto(
      List<DAGProtos.StatusGetOptsProto> protoList) {
    Set<StatusGetOpts> opts = new TreeSet<StatusGetOpts>();
    for (DAGProtos.StatusGetOptsProto proto : protoList) {
      opts.add(convertStatusGetOptsFromProto(proto));
    }
    return opts;
  }

  /**
   * Serializes Hadoop {@link Credentials} to a {@link ByteString}, or {@code null} for
   * {@code null} input.
   *
   * @throws TezUncheckedException if serialization fails
   */
  public static ByteString convertCredentialsToProto(Credentials credentials) {
    if (credentials == null) {
      return null;
    }
    // In-memory stream; no external resource to release on the failure path.
    Output output = ByteString.newOutput();
    DataOutputStream dos = new DataOutputStream(output);
    try {
      credentials.writeTokenStorageToStream(dos);
      return output.toByteString();
    } catch (IOException e) {
      throw new TezUncheckedException("Failed to serialize Credentials", e);
    }
  }

  /**
   * Deserializes Hadoop {@link Credentials} from a {@link ByteString}, or {@code null} for
   * {@code null} input.
   *
   * @throws TezUncheckedException if deserialization fails
   */
  public static Credentials convertByteStringToCredentials(ByteString byteString) {
    if (byteString == null) {
      return null;
    }
    DataInputByteBuffer dib = new DataInputByteBuffer();
    dib.reset(byteString.asReadOnlyByteBuffer());
    Credentials credentials = new Credentials();
    try {
      credentials.readTokenStorageStream(dib);
      return credentials;
    } catch (IOException e) {
      throw new TezUncheckedException("Failed to deserialize Credentials", e);
    }
  }

  /** Rebuilds a {@link VertexLocationHint} from its proto form. */
  public static VertexLocationHint convertVertexLocationHintFromProto(
      VertexLocationHintProto proto) {
    return VertexLocationHint.create(convertTaskLocationHints(
        proto.getTaskLocationHintsList(), proto.getTaskLocationHintsCount()));
  }

  /** Serializes a {@link VertexLocationHint} (host/rack affinity per task) to proto. */
  public static VertexLocationHintProto convertVertexLocationHintToProto(
      VertexLocationHint vertexLocationHint) {
    VertexLocationHintProto.Builder builder = VertexLocationHintProto.newBuilder();
    if (vertexLocationHint.getTaskLocationHints() != null) {
      for (TaskLocationHint taskLocationHint : vertexLocationHint.getTaskLocationHints()) {
        PlanTaskLocationHint.Builder taskLHBuilder = PlanTaskLocationHint.newBuilder();
        if (taskLocationHint.getHosts() != null) {
          taskLHBuilder.addAllHost(taskLocationHint.getHosts());
        }
        if (taskLocationHint.getRacks() != null) {
          taskLHBuilder.addAllRack(taskLocationHint.getRacks());
        }
        builder.addTaskLocationHints(taskLHBuilder.build());
      }
    }
    return builder.build();
  }

  /** Wraps a raw byte buffer (possibly {@code null}) into a versioned {@link UserPayload}. */
  public static UserPayload convertToTezUserPayload(@Nullable ByteBuffer payload, int version) {
    return UserPayload.create(payload, version);
  }

  /** Unwraps the raw payload buffer, or {@code null} when the payload itself is {@code null}. */
  @Nullable
  public static ByteBuffer convertFromTezUserPayload(@Nullable UserPayload payload) {
    if (payload == null) {
      return null;
    }
    return payload.getRawPayload();
  }

  /** Serializes a {@link VertexExecutionContext} to proto; {@code null} maps to {@code null}. */
  public static VertexExecutionContextProto convertToProto(
      VertexExecutionContext context) {
    if (context == null) {
      return null;
    }
    VertexExecutionContextProto.Builder builder = VertexExecutionContextProto.newBuilder();
    builder.setExecuteInAm(context.shouldExecuteInAm());
    builder.setExecuteInContainers(context.shouldExecuteInContainers());
    if (context.getTaskSchedulerName() != null) {
      builder.setTaskSchedulerName(context.getTaskSchedulerName());
    }
    if (context.getContainerLauncherName() != null) {
      builder.setContainerLauncherName(context.getContainerLauncherName());
    }
    if (context.getTaskCommName() != null) {
      builder.setTaskCommName(context.getTaskCommName());
    }
    return builder.build();
  }

  /**
   * Rebuilds a {@link VertexExecutionContext} from proto; {@code null} maps to {@code null}.
   * Execute-in-AM takes precedence over execute-in-containers; otherwise the named plugin
   * triple is used (absent names become {@code null}).
   */
  public static VertexExecutionContext convertFromProto(
      VertexExecutionContextProto proto) {
    if (proto == null) {
      return null;
    }
    if (proto.getExecuteInAm()) {
      return VertexExecutionContext.createExecuteInAm(proto.getExecuteInAm());
    }
    if (proto.getExecuteInContainers()) {
      return VertexExecutionContext.createExecuteInContainers(proto.getExecuteInContainers());
    }
    String taskScheduler = proto.hasTaskSchedulerName() ? proto.getTaskSchedulerName() : null;
    String containerLauncher =
        proto.hasContainerLauncherName() ? proto.getContainerLauncherName() : null;
    String taskComm = proto.hasTaskCommName() ? proto.getTaskCommName() : null;
    return VertexExecutionContext.create(taskScheduler, containerLauncher, taskComm);
  }

  /** Serializes an array of named entity descriptors, preserving order. */
  public static List<TezNamedEntityDescriptorProto> convertNamedEntityCollectionToProto(
      NamedEntityDescriptor[] namedEntityDescriptors) {
    List<TezNamedEntityDescriptorProto> list =
        Lists.newArrayListWithCapacity(namedEntityDescriptors.length);
    for (NamedEntityDescriptor namedEntity : namedEntityDescriptors) {
      list.add(convertNamedEntityToProto(namedEntity));
    }
    return list;
  }

  /** Serializes one named entity descriptor (name + embedded entity descriptor). */
  public static TezNamedEntityDescriptorProto convertNamedEntityToProto(
      NamedEntityDescriptor namedEntityDescriptor) {
    TezNamedEntityDescriptorProto.Builder builder = TezNamedEntityDescriptorProto.newBuilder();
    builder.setName(namedEntityDescriptor.getEntityName());
    builder.setEntityDescriptor(DagTypeConverters.convertToDAGPlan(namedEntityDescriptor));
    return builder.build();
  }

  /**
   * Serializes a {@link ServicePluginsDescriptor} to the AM plugin proto. A {@code null}
   * descriptor yields the defaults: containers enabled, uber disabled, no plugins.
   */
  public static AMPluginDescriptorProto convertServicePluginDescriptorToProto(
      ServicePluginsDescriptor servicePluginsDescriptor) {
    AMPluginDescriptorProto.Builder pluginDescriptorBuilder =
        AMPluginDescriptorProto.newBuilder();
    if (servicePluginsDescriptor == null) {
      pluginDescriptorBuilder.setContainersEnabled(true).setUberEnabled(false);
      return pluginDescriptorBuilder.build();
    }
    pluginDescriptorBuilder.setContainersEnabled(servicePluginsDescriptor.areContainersEnabled());
    pluginDescriptorBuilder.setUberEnabled(servicePluginsDescriptor.isUberEnabled());
    if (servicePluginsDescriptor.getTaskSchedulerDescriptors() != null &&
        servicePluginsDescriptor.getTaskSchedulerDescriptors().length > 0) {
      pluginDescriptorBuilder.addAllTaskSchedulers(convertNamedEntityCollectionToProto(
          servicePluginsDescriptor.getTaskSchedulerDescriptors()));
    }
    if (servicePluginsDescriptor.getContainerLauncherDescriptors() != null &&
        servicePluginsDescriptor.getContainerLauncherDescriptors().length > 0) {
      pluginDescriptorBuilder.addAllContainerLaunchers(convertNamedEntityCollectionToProto(
          servicePluginsDescriptor.getContainerLauncherDescriptors()));
    }
    if (servicePluginsDescriptor.getTaskCommunicatorDescriptors() != null &&
        servicePluginsDescriptor.getTaskCommunicatorDescriptors().length > 0) {
      pluginDescriptorBuilder.addAllTaskCommunicators(convertNamedEntityCollectionToProto(
          servicePluginsDescriptor.getTaskCommunicatorDescriptors()));
    }
    return pluginDescriptorBuilder.build();
  }

  /** Serializes a {@link CallerContext}; optional fields are set only when present. */
  public static CallerContextProto convertCallerContextToProto(CallerContext callerContext) {
    CallerContextProto.Builder callerContextBuilder = CallerContextProto.newBuilder();
    callerContextBuilder.setContext(callerContext.getContext());
    if (callerContext.getCallerId() != null) {
      callerContextBuilder.setCallerId(callerContext.getCallerId());
    }
    if (callerContext.getCallerType() != null) {
      callerContextBuilder.setCallerType(callerContext.getCallerType());
    }
    if (callerContext.getBlob() != null) {
      callerContextBuilder.setBlob(callerContext.getBlob());
    }
    return callerContextBuilder.build();
  }

  /**
   * Rebuilds a {@link CallerContext} from proto. Caller id/type are applied only when both
   * are present, matching the setter's paired contract.
   */
  public static CallerContext convertCallerContextFromProto(CallerContextProto proto) {
    CallerContext callerContext = CallerContext.create(proto.getContext(),
        (proto.hasBlob() ? proto.getBlob() : null));
    if (proto.hasCallerType() && proto.hasCallerId()) {
      callerContext.setCallerIdAndType(proto.getCallerId(), proto.getCallerType());
    }
    return callerContext;
  }

  /** Serializes DAG ACLs to proto; {@code null} maps to {@code null}. */
  public static ACLInfo convertDAGAccessControlsToProto(DAGAccessControls dagAccessControls) {
    if (dagAccessControls == null) {
      return null;
    }
    ACLInfo.Builder builder = ACLInfo.newBuilder();
    builder.addAllUsersWithViewAccess(dagAccessControls.getUsersWithViewACLs());
    builder.addAllUsersWithModifyAccess(dagAccessControls.getUsersWithModifyACLs());
    builder.addAllGroupsWithViewAccess(dagAccessControls.getGroupsWithViewACLs());
    builder.addAllGroupsWithModifyAccess(dagAccessControls.getGroupsWithModifyACLs());
    return builder.build();
  }

  /** Rebuilds DAG ACLs from proto; {@code null} maps to {@code null}. */
  public static DAGAccessControls convertDAGAccessControlsFromProto(ACLInfo aclInfo) {
    if (aclInfo == null) {
      return null;
    }
    DAGAccessControls dagAccessControls = new DAGAccessControls();
    dagAccessControls.setUsersWithViewACLs(aclInfo.getUsersWithViewAccessList());
    dagAccessControls.setUsersWithModifyACLs(aclInfo.getUsersWithModifyAccessList());
    dagAccessControls.setGroupsWithViewACLs(aclInfo.getGroupsWithViewAccessList());
    dagAccessControls.setGroupsWithModifyACLs(aclInfo.getGroupsWithModifyAccessList());
    return dagAccessControls;
  }
}
|
googleapis/google-cloud-java | 36,799 | java-analytics-admin/proto-google-analytics-admin-v1beta/src/main/java/com/google/analytics/admin/v1beta/FirebaseLink.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/analytics/admin/v1beta/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.analytics.admin.v1beta;
/**
*
*
* <pre>
* A link between a Google Analytics property and a Firebase project.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1beta.FirebaseLink}
*/
public final class FirebaseLink extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.analytics.admin.v1beta.FirebaseLink)
FirebaseLinkOrBuilder {
private static final long serialVersionUID = 0L;
// Use FirebaseLink.newBuilder() to construct.
  // Builder-based constructor invoked by the generated Builder's build(); copies the
  // builder's field state into this immutable message via the superclass.
  private FirebaseLink(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; string fields start as "".
  private FirebaseLink() {
    name_ = "";
    project_ = "";
  }
  // Invoked by the protobuf runtime to create fresh instances of this message type.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new FirebaseLink();
  }
  // Returns the message descriptor generated from
  // google/analytics/admin/v1beta/resources.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.analytics.admin.v1beta.ResourcesProto
        .internal_static_google_analytics_admin_v1beta_FirebaseLink_descriptor;
  }
  // Supplies the reflection table mapping proto field numbers to the generated
  // message/builder accessors for this type.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.analytics.admin.v1beta.ResourcesProto
        .internal_static_google_analytics_admin_v1beta_FirebaseLink_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.analytics.admin.v1beta.FirebaseLink.class,
            com.google.analytics.admin.v1beta.FirebaseLink.Builder.class);
  }
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Output only. Example format: properties/1234/firebaseLinks/5678
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Example format: properties/1234/firebaseLinks/5678
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PROJECT_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Immutable. Firebase project resource name. When creating a FirebaseLink,
* you may provide this resource name using either a project number or project
* ID. Once this resource has been created, returned FirebaseLinks will always
* have a project_name that contains a project number.
*
* Format: 'projects/{project number}'
* Example: 'projects/1234'
* </pre>
*
* <code>string project = 2 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Immutable. Firebase project resource name. When creating a FirebaseLink,
* you may provide this resource name using either a project number or project
* ID. Once this resource has been created, returned FirebaseLinks will always
* have a project_name that contains a project number.
*
* Format: 'projects/{project number}'
* Example: 'projects/1234'
* </pre>
*
* <code>string project = 2 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CREATE_TIME_FIELD_NUMBER = 3;
private com.google.protobuf.Timestamp createTime_;
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
@java.lang.Override
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getCreateTime() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getCreateTime());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCreateTime());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.analytics.admin.v1beta.FirebaseLink)) {
return super.equals(obj);
}
com.google.analytics.admin.v1beta.FirebaseLink other =
(com.google.analytics.admin.v1beta.FirebaseLink) obj;
if (!getName().equals(other.getName())) return false;
if (!getProject().equals(other.getProject())) return false;
if (hasCreateTime() != other.hasCreateTime()) return false;
if (hasCreateTime()) {
if (!getCreateTime().equals(other.getCreateTime())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + PROJECT_FIELD_NUMBER;
hash = (53 * hash) + getProject().hashCode();
if (hasCreateTime()) {
hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getCreateTime().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.analytics.admin.v1beta.FirebaseLink parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.analytics.admin.v1beta.FirebaseLink prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A link between a Google Analytics property and a Firebase project.
* </pre>
*
* Protobuf type {@code google.analytics.admin.v1beta.FirebaseLink}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.analytics.admin.v1beta.FirebaseLink)
com.google.analytics.admin.v1beta.FirebaseLinkOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.analytics.admin.v1beta.ResourcesProto
.internal_static_google_analytics_admin_v1beta_FirebaseLink_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.analytics.admin.v1beta.ResourcesProto
.internal_static_google_analytics_admin_v1beta_FirebaseLink_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.analytics.admin.v1beta.FirebaseLink.class,
com.google.analytics.admin.v1beta.FirebaseLink.Builder.class);
}
// Construct using com.google.analytics.admin.v1beta.FirebaseLink.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCreateTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
project_ = "";
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.analytics.admin.v1beta.ResourcesProto
.internal_static_google_analytics_admin_v1beta_FirebaseLink_descriptor;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.FirebaseLink getDefaultInstanceForType() {
return com.google.analytics.admin.v1beta.FirebaseLink.getDefaultInstance();
}
@java.lang.Override
public com.google.analytics.admin.v1beta.FirebaseLink build() {
com.google.analytics.admin.v1beta.FirebaseLink result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.FirebaseLink buildPartial() {
com.google.analytics.admin.v1beta.FirebaseLink result =
new com.google.analytics.admin.v1beta.FirebaseLink(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.analytics.admin.v1beta.FirebaseLink result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.project_ = project_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.analytics.admin.v1beta.FirebaseLink) {
return mergeFrom((com.google.analytics.admin.v1beta.FirebaseLink) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.analytics.admin.v1beta.FirebaseLink other) {
if (other == com.google.analytics.admin.v1beta.FirebaseLink.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getProject().isEmpty()) {
project_ = other.project_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasCreateTime()) {
mergeCreateTime(other.getCreateTime());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
project_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Output only. Example format: properties/1234/firebaseLinks/5678
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. Example format: properties/1234/firebaseLinks/5678
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. Example format: properties/1234/firebaseLinks/5678
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Example format: properties/1234/firebaseLinks/5678
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Example format: properties/1234/firebaseLinks/5678
* </pre>
*
* <code>string name = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object project_ = "";
/**
*
*
* <pre>
* Immutable. Firebase project resource name. When creating a FirebaseLink,
* you may provide this resource name using either a project number or project
* ID. Once this resource has been created, returned FirebaseLinks will always
* have a project_name that contains a project number.
*
* Format: 'projects/{project number}'
* Example: 'projects/1234'
* </pre>
*
* <code>string project = 2 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The project.
*/
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Immutable. Firebase project resource name. When creating a FirebaseLink,
* you may provide this resource name using either a project number or project
* ID. Once this resource has been created, returned FirebaseLinks will always
* have a project_name that contains a project number.
*
* Format: 'projects/{project number}'
* Example: 'projects/1234'
* </pre>
*
* <code>string project = 2 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return The bytes for project.
*/
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Immutable. Firebase project resource name. When creating a FirebaseLink,
* you may provide this resource name using either a project number or project
* ID. Once this resource has been created, returned FirebaseLinks will always
* have a project_name that contains a project number.
*
* Format: 'projects/{project number}'
* Example: 'projects/1234'
* </pre>
*
* <code>string project = 2 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @param value The project to set.
* @return This builder for chaining.
*/
public Builder setProject(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Immutable. Firebase project resource name. When creating a FirebaseLink,
* you may provide this resource name using either a project number or project
* ID. Once this resource has been created, returned FirebaseLinks will always
* have a project_name that contains a project number.
*
* Format: 'projects/{project number}'
* Example: 'projects/1234'
* </pre>
*
* <code>string project = 2 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @return This builder for chaining.
*/
public Builder clearProject() {
project_ = getDefaultInstance().getProject();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Immutable. Firebase project resource name. When creating a FirebaseLink,
* you may provide this resource name using either a project number or project
* ID. Once this resource has been created, returned FirebaseLinks will always
* have a project_name that contains a project number.
*
* Format: 'projects/{project number}'
* Example: 'projects/1234'
* </pre>
*
* <code>string project = 2 [(.google.api.field_behavior) = IMMUTABLE];</code>
*
* @param value The bytes for project to set.
* @return This builder for chaining.
*/
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
project_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.protobuf.Timestamp createTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
createTimeBuilder_;
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return Whether the createTime field is set.
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The createTime.
*/
public com.google.protobuf.Timestamp getCreateTime() {
if (createTimeBuilder_ == null) {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
} else {
return createTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
createTime_ = value;
} else {
createTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (createTimeBuilder_ == null) {
createTime_ = builderForValue.build();
} else {
createTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& createTime_ != null
&& createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getCreateTimeBuilder().mergeFrom(value);
} else {
createTime_ = value;
}
} else {
createTimeBuilder_.mergeFrom(value);
}
if (createTime_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000004);
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getCreateTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
if (createTimeBuilder_ != null) {
return createTimeBuilder_.getMessageOrBuilder();
} else {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
}
}
/**
*
*
* <pre>
* Output only. Time when this FirebaseLink was originally created.
* </pre>
*
* <code>
* .google.protobuf.Timestamp create_time = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getCreateTimeFieldBuilder() {
if (createTimeBuilder_ == null) {
createTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getCreateTime(), getParentForChildren(), isClean());
createTime_ = null;
}
return createTimeBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.analytics.admin.v1beta.FirebaseLink)
}
// @@protoc_insertion_point(class_scope:google.analytics.admin.v1beta.FirebaseLink)
private static final com.google.analytics.admin.v1beta.FirebaseLink DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.analytics.admin.v1beta.FirebaseLink();
}
public static com.google.analytics.admin.v1beta.FirebaseLink getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<FirebaseLink> PARSER =
new com.google.protobuf.AbstractParser<FirebaseLink>() {
@java.lang.Override
public FirebaseLink parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<FirebaseLink> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<FirebaseLink> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.analytics.admin.v1beta.FirebaseLink getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,799 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/LocationPolicyLocation.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.LocationPolicyLocation}
*/
public final class LocationPolicyLocation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.LocationPolicyLocation)
LocationPolicyLocationOrBuilder {
private static final long serialVersionUID = 0L;
// Use LocationPolicyLocation.newBuilder() to construct.
private LocationPolicyLocation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private LocationPolicyLocation() {
preference_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new LocationPolicyLocation();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_LocationPolicyLocation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_LocationPolicyLocation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.LocationPolicyLocation.class,
com.google.cloud.compute.v1.LocationPolicyLocation.Builder.class);
}
  /**
   *
   *
   * <pre>
   * Preference for a given location. Set to either ALLOW or DENY.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.compute.v1.LocationPolicyLocation.Preference}
   */
  public enum Preference implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * A value indicating that the enum field is not set.
     * </pre>
     *
     * <code>UNDEFINED_PREFERENCE = 0;</code>
     */
    UNDEFINED_PREFERENCE(0),
    /**
     *
     *
     * <pre>
     * Location is allowed for use.
     * </pre>
     *
     * <code>ALLOW = 62368553;</code>
     */
    ALLOW(62368553),
    /**
     *
     *
     * <pre>
     * Location is prohibited.
     * </pre>
     *
     * <code>DENY = 2094604;</code>
     */
    DENY(2094604),
    /**
     *
     *
     * <pre>
     * Default value, unused.
     * </pre>
     *
     * <code>PREFERENCE_UNSPECIFIED = 496219571;</code>
     */
    PREFERENCE_UNSPECIFIED(496219571),
    // Sentinel for wire values not known to this generated version of the enum.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * A value indicating that the enum field is not set.
     * </pre>
     *
     * <code>UNDEFINED_PREFERENCE = 0;</code>
     */
    public static final int UNDEFINED_PREFERENCE_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Location is allowed for use.
     * </pre>
     *
     * <code>ALLOW = 62368553;</code>
     */
    public static final int ALLOW_VALUE = 62368553;
    /**
     *
     *
     * <pre>
     * Location is prohibited.
     * </pre>
     *
     * <code>DENY = 2094604;</code>
     */
    public static final int DENY_VALUE = 2094604;
    /**
     *
     *
     * <pre>
     * Default value, unused.
     * </pre>
     *
     * <code>PREFERENCE_UNSPECIFIED = 496219571;</code>
     */
    public static final int PREFERENCE_UNSPECIFIED_VALUE = 496219571;

    // Numeric wire value of this constant; UNRECOGNIZED has no wire value and throws.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Preference valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static Preference forNumber(int value) {
      switch (value) {
        case 0:
          return UNDEFINED_PREFERENCE;
        case 62368553:
          return ALLOW;
        case 2094604:
          return DENY;
        case 496219571:
          return PREFERENCE_UNSPECIFIED;
        default:
          // Unknown wire value: caller (findValueByNumber) treats null as unrecognized.
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<Preference> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<Preference> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Preference>() {
          public Preference findValueByNumber(int number) {
            return Preference.forNumber(number);
          }
        };

    // Descriptor-based reflection accessors; UNRECOGNIZED has no descriptor entry.
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.compute.v1.LocationPolicyLocation.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    private static final Preference[] VALUES = values();

    public static Preference valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private Preference(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.compute.v1.LocationPolicyLocation.Preference)
  }
  // Presence bits for optional fields: 0x1 = constraints, 0x2 = preference.
  private int bitField0_;
  public static final int CONSTRAINTS_FIELD_NUMBER = 3909174;
  private com.google.cloud.compute.v1.LocationPolicyLocationConstraints constraints_;
  /**
   *
   *
   * <pre>
   * Constraints that the caller requires on the result distribution in this zone.
   * </pre>
   *
   * <code>
   * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
   * </code>
   *
   * @return Whether the constraints field is set.
   */
  @java.lang.Override
  public boolean hasConstraints() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Constraints that the caller requires on the result distribution in this zone.
   * </pre>
   *
   * <code>
   * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
   * </code>
   *
   * @return The constraints, or the default instance if unset (never null).
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.LocationPolicyLocationConstraints getConstraints() {
    return constraints_ == null
        ? com.google.cloud.compute.v1.LocationPolicyLocationConstraints.getDefaultInstance()
        : constraints_;
  }
  /**
   *
   *
   * <pre>
   * Constraints that the caller requires on the result distribution in this zone.
   * </pre>
   *
   * <code>
   * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.LocationPolicyLocationConstraintsOrBuilder
      getConstraintsOrBuilder() {
    return constraints_ == null
        ? com.google.cloud.compute.v1.LocationPolicyLocationConstraints.getDefaultInstance()
        : constraints_;
  }
  public static final int PREFERENCE_FIELD_NUMBER = 150781147;

  // Holds either a String or a ByteString; lazily converted to String on first read.
  @SuppressWarnings("serial")
  private volatile java.lang.Object preference_ = "";
  /**
   *
   *
   * <pre>
   * Preference for a given location. Set to either ALLOW or DENY.
   * Check the Preference enum for the list of possible values.
   * </pre>
   *
   * <code>optional string preference = 150781147;</code>
   *
   * @return Whether the preference field is set.
   */
  @java.lang.Override
  public boolean hasPreference() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Preference for a given location. Set to either ALLOW or DENY.
   * Check the Preference enum for the list of possible values.
   * </pre>
   *
   * <code>optional string preference = 150781147;</code>
   *
   * @return The preference.
   */
  @java.lang.Override
  public java.lang.String getPreference() {
    java.lang.Object ref = preference_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      preference_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Preference for a given location. Set to either ALLOW or DENY.
   * Check the Preference enum for the list of possible values.
   * </pre>
   *
   * <code>optional string preference = 150781147;</code>
   *
   * @return The bytes for preference.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPreferenceBytes() {
    java.lang.Object ref = preference_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      preference_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so it is always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bits are set, in field-number order.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3909174, getConstraints());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 150781147, preference_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size; mirrors writeTo() field-by-field.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(3909174, getConstraints());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(150781147, preference_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality: presence must match, and set fields must compare equal.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.LocationPolicyLocation)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.LocationPolicyLocation other =
        (com.google.cloud.compute.v1.LocationPolicyLocation) obj;
    if (hasConstraints() != other.hasConstraints()) return false;
    if (hasConstraints()) {
      if (!getConstraints().equals(other.getConstraints())) return false;
    }
    if (hasPreference() != other.hasPreference()) return false;
    if (hasPreference()) {
      if (!getPreference().equals(other.getPreference())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash over descriptor, set fields (keyed by field number), and unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasConstraints()) {
      hash = (37 * hash) + CONSTRAINTS_FIELD_NUMBER;
      hash = (53 * hash) + getConstraints().hashCode();
    }
    if (hasPreference()) {
      hash = (37 * hash) + PREFERENCE_FIELD_NUMBER;
      hash = (53 * hash) + getPreference().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: buffers/byte arrays delegate to PARSER directly;
  // stream variants go through GeneratedMessageV3 helpers so IOExceptions propagate unchanged.
  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.compute.v1.LocationPolicyLocation parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the given message's field values.
  public static Builder newBuilder(com.google.cloud.compute.v1.LocationPolicyLocation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // Avoids an unnecessary mergeFrom when this is already the default instance.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.LocationPolicyLocation}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.LocationPolicyLocation)
      com.google.cloud.compute.v1.LocationPolicyLocationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_LocationPolicyLocation_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_LocationPolicyLocation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.LocationPolicyLocation.class,
              com.google.cloud.compute.v1.LocationPolicyLocation.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.LocationPolicyLocation.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-message field builders when the runtime requires it.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getConstraintsFieldBuilder();
      }
    }

    // Resets all fields and presence bits to their defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      constraints_ = null;
      if (constraintsBuilder_ != null) {
        constraintsBuilder_.dispose();
        constraintsBuilder_ = null;
      }
      preference_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_LocationPolicyLocation_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.LocationPolicyLocation getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.LocationPolicyLocation.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.LocationPolicyLocation build() {
      com.google.cloud.compute.v1.LocationPolicyLocation result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.LocationPolicyLocation buildPartial() {
      com.google.cloud.compute.v1.LocationPolicyLocation result =
          new com.google.cloud.compute.v1.LocationPolicyLocation(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields from the builder into the new message, translating presence bits.
    private void buildPartial0(com.google.cloud.compute.v1.LocationPolicyLocation result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.constraints_ =
            constraintsBuilder_ == null ? constraints_ : constraintsBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.preference_ = preference_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.LocationPolicyLocation) {
        return mergeFrom((com.google.cloud.compute.v1.LocationPolicyLocation) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: only fields set on `other` overwrite or merge into this builder.
    public Builder mergeFrom(com.google.cloud.compute.v1.LocationPolicyLocation other) {
      if (other == com.google.cloud.compute.v1.LocationPolicyLocation.getDefaultInstance())
        return this;
      if (other.hasConstraints()) {
        mergeConstraints(other.getConstraints());
      }
      if (other.hasPreference()) {
        preference_ = other.preference_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop; case labels are precomputed tags (field number << 3 | wire type).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 31273394:
              {
                input.readMessage(getConstraintsFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 31273394
            case 1206249178:
              {
                preference_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 1206249178
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.cloud.compute.v1.LocationPolicyLocationConstraints constraints_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.compute.v1.LocationPolicyLocationConstraints,
            com.google.cloud.compute.v1.LocationPolicyLocationConstraints.Builder,
            com.google.cloud.compute.v1.LocationPolicyLocationConstraintsOrBuilder>
        constraintsBuilder_;
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     *
     * @return Whether the constraints field is set.
     */
    public boolean hasConstraints() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     *
     * @return The constraints.
     */
    public com.google.cloud.compute.v1.LocationPolicyLocationConstraints getConstraints() {
      if (constraintsBuilder_ == null) {
        return constraints_ == null
            ? com.google.cloud.compute.v1.LocationPolicyLocationConstraints.getDefaultInstance()
            : constraints_;
      } else {
        return constraintsBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     */
    public Builder setConstraints(
        com.google.cloud.compute.v1.LocationPolicyLocationConstraints value) {
      if (constraintsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        constraints_ = value;
      } else {
        constraintsBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     */
    public Builder setConstraints(
        com.google.cloud.compute.v1.LocationPolicyLocationConstraints.Builder builderForValue) {
      if (constraintsBuilder_ == null) {
        constraints_ = builderForValue.build();
      } else {
        constraintsBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     */
    public Builder mergeConstraints(
        com.google.cloud.compute.v1.LocationPolicyLocationConstraints value) {
      if (constraintsBuilder_ == null) {
        // Merge into the existing value only if one is already set; otherwise take `value`.
        if (((bitField0_ & 0x00000001) != 0)
            && constraints_ != null
            && constraints_
                != com.google.cloud.compute.v1.LocationPolicyLocationConstraints
                    .getDefaultInstance()) {
          getConstraintsBuilder().mergeFrom(value);
        } else {
          constraints_ = value;
        }
      } else {
        constraintsBuilder_.mergeFrom(value);
      }
      if (constraints_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     */
    public Builder clearConstraints() {
      bitField0_ = (bitField0_ & ~0x00000001);
      constraints_ = null;
      if (constraintsBuilder_ != null) {
        constraintsBuilder_.dispose();
        constraintsBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     */
    public com.google.cloud.compute.v1.LocationPolicyLocationConstraints.Builder
        getConstraintsBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getConstraintsFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     */
    public com.google.cloud.compute.v1.LocationPolicyLocationConstraintsOrBuilder
        getConstraintsOrBuilder() {
      if (constraintsBuilder_ != null) {
        return constraintsBuilder_.getMessageOrBuilder();
      } else {
        return constraints_ == null
            ? com.google.cloud.compute.v1.LocationPolicyLocationConstraints.getDefaultInstance()
            : constraints_;
      }
    }
    /**
     *
     *
     * <pre>
     * Constraints that the caller requires on the result distribution in this zone.
     * </pre>
     *
     * <code>
     * optional .google.cloud.compute.v1.LocationPolicyLocationConstraints constraints = 3909174;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.compute.v1.LocationPolicyLocationConstraints,
            com.google.cloud.compute.v1.LocationPolicyLocationConstraints.Builder,
            com.google.cloud.compute.v1.LocationPolicyLocationConstraintsOrBuilder>
        getConstraintsFieldBuilder() {
      if (constraintsBuilder_ == null) {
        // Lazily create the field builder; ownership of the value moves into it.
        constraintsBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.compute.v1.LocationPolicyLocationConstraints,
                com.google.cloud.compute.v1.LocationPolicyLocationConstraints.Builder,
                com.google.cloud.compute.v1.LocationPolicyLocationConstraintsOrBuilder>(
                getConstraints(), getParentForChildren(), isClean());
        constraints_ = null;
      }
      return constraintsBuilder_;
    }

    // Holds either a String or a ByteString; lazily converted on access (same as the message).
    private java.lang.Object preference_ = "";
    /**
     *
     *
     * <pre>
     * Preference for a given location. Set to either ALLOW or DENY.
     * Check the Preference enum for the list of possible values.
     * </pre>
     *
     * <code>optional string preference = 150781147;</code>
     *
     * @return Whether the preference field is set.
     */
    public boolean hasPreference() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Preference for a given location. Set to either ALLOW or DENY.
     * Check the Preference enum for the list of possible values.
     * </pre>
     *
     * <code>optional string preference = 150781147;</code>
     *
     * @return The preference.
     */
    public java.lang.String getPreference() {
      java.lang.Object ref = preference_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        preference_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Preference for a given location. Set to either ALLOW or DENY.
     * Check the Preference enum for the list of possible values.
     * </pre>
     *
     * <code>optional string preference = 150781147;</code>
     *
     * @return The bytes for preference.
     */
    public com.google.protobuf.ByteString getPreferenceBytes() {
      java.lang.Object ref = preference_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        preference_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Preference for a given location. Set to either ALLOW or DENY.
     * Check the Preference enum for the list of possible values.
     * </pre>
     *
     * <code>optional string preference = 150781147;</code>
     *
     * @param value The preference to set.
     * @return This builder for chaining.
     */
    public Builder setPreference(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      preference_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Preference for a given location. Set to either ALLOW or DENY.
     * Check the Preference enum for the list of possible values.
     * </pre>
     *
     * <code>optional string preference = 150781147;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPreference() {
      preference_ = getDefaultInstance().getPreference();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Preference for a given location. Set to either ALLOW or DENY.
     * Check the Preference enum for the list of possible values.
     * </pre>
     *
     * <code>optional string preference = 150781147;</code>
     *
     * @param value The bytes for preference to set.
     * @return This builder for chaining.
     */
    public Builder setPreferenceBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      preference_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.LocationPolicyLocation)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.LocationPolicyLocation)
  // Singleton default instance; initialized in a static block so field defaults are in place.
  private static final com.google.cloud.compute.v1.LocationPolicyLocation DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.LocationPolicyLocation();
  }

  public static com.google.cloud.compute.v1.LocationPolicyLocation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser delegates to the builder's mergeFrom and surfaces partial results on failure.
  private static final com.google.protobuf.Parser<LocationPolicyLocation> PARSER =
      new com.google.protobuf.AbstractParser<LocationPolicyLocation>() {
        @java.lang.Override
        public LocationPolicyLocation parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<LocationPolicyLocation> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<LocationPolicyLocation> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.LocationPolicyLocation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,124 | java-dataplex/google-cloud-dataplex/src/main/java/com/google/cloud/dataplex/v1/stub/CmekServiceStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dataplex.v1.stub;
import static com.google.cloud.dataplex.v1.CmekServiceClient.ListEncryptionConfigsPagedResponse;
import static com.google.cloud.dataplex.v1.CmekServiceClient.ListLocationsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.grpc.ProtoOperationTransformers;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.dataplex.v1.CreateEncryptionConfigRequest;
import com.google.cloud.dataplex.v1.DeleteEncryptionConfigRequest;
import com.google.cloud.dataplex.v1.EncryptionConfig;
import com.google.cloud.dataplex.v1.GetEncryptionConfigRequest;
import com.google.cloud.dataplex.v1.ListEncryptionConfigsRequest;
import com.google.cloud.dataplex.v1.ListEncryptionConfigsResponse;
import com.google.cloud.dataplex.v1.OperationMetadata;
import com.google.cloud.dataplex.v1.UpdateEncryptionConfigRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link CmekServiceStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (dataplex.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getEncryptionConfig:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CmekServiceStubSettings.Builder cmekServiceSettingsBuilder =
* CmekServiceStubSettings.newBuilder();
* cmekServiceSettingsBuilder
* .getEncryptionConfigSettings()
* .setRetrySettings(
* cmekServiceSettingsBuilder
* .getEncryptionConfigSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* CmekServiceStubSettings cmekServiceSettings = cmekServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for createEncryptionConfig:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CmekServiceStubSettings.Builder cmekServiceSettingsBuilder =
* CmekServiceStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
 * cmekServiceSettingsBuilder
 *     .createEncryptionConfigOperationSettings()
 *     .setPollingAlgorithm(timedRetryAlgorithm)
 *     .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class CmekServiceStubSettings extends StubSettings<CmekServiceStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Immutable per-RPC call settings. Each unary RPC has a UnaryCallSettings entry;
  // each long-running RPC additionally has an OperationCallSettings entry describing
  // how its returned Operation is polled and unpacked; paginated RPCs use
  // PagedCallSettings so callers receive auto-paging responses.
  private final UnaryCallSettings<CreateEncryptionConfigRequest, Operation>
      createEncryptionConfigSettings;
  private final OperationCallSettings<
          CreateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
      createEncryptionConfigOperationSettings;
  private final UnaryCallSettings<UpdateEncryptionConfigRequest, Operation>
      updateEncryptionConfigSettings;
  private final OperationCallSettings<
          UpdateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
      updateEncryptionConfigOperationSettings;
  private final UnaryCallSettings<DeleteEncryptionConfigRequest, Operation>
      deleteEncryptionConfigSettings;
  private final OperationCallSettings<DeleteEncryptionConfigRequest, Empty, OperationMetadata>
      deleteEncryptionConfigOperationSettings;
  private final PagedCallSettings<
          ListEncryptionConfigsRequest,
          ListEncryptionConfigsResponse,
          ListEncryptionConfigsPagedResponse>
      listEncryptionConfigsSettings;
  private final UnaryCallSettings<GetEncryptionConfigRequest, EncryptionConfig>
      getEncryptionConfigSettings;
  private final PagedCallSettings<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings;
  private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;

  // Page descriptor for ListEncryptionConfigs: tells the paging machinery how to
  // inject page tokens/sizes into requests and extract tokens/resources from responses.
  private static final PagedListDescriptor<
          ListEncryptionConfigsRequest, ListEncryptionConfigsResponse, EncryptionConfig>
      LIST_ENCRYPTION_CONFIGS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListEncryptionConfigsRequest, ListEncryptionConfigsResponse, EncryptionConfig>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListEncryptionConfigsRequest injectToken(
                ListEncryptionConfigsRequest payload, String token) {
              return ListEncryptionConfigsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListEncryptionConfigsRequest injectPageSize(
                ListEncryptionConfigsRequest payload, int pageSize) {
              return ListEncryptionConfigsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListEncryptionConfigsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListEncryptionConfigsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<EncryptionConfig> extractResources(
                ListEncryptionConfigsResponse payload) {
              return payload.getEncryptionConfigsList();
            }
          };

  // Page descriptor for the mixed-in Locations ListLocations RPC.
  private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
      LIST_LOCATIONS_PAGE_STR_DESC =
          new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
              return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
              return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListLocationsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListLocationsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Location> extractResources(ListLocationsResponse payload) {
              return payload.getLocationsList();
            }
          };

  // Factory that wraps a raw ListEncryptionConfigs future response in the
  // auto-paging ListEncryptionConfigsPagedResponse, using the descriptor above.
  private static final PagedListResponseFactory<
          ListEncryptionConfigsRequest,
          ListEncryptionConfigsResponse,
          ListEncryptionConfigsPagedResponse>
      LIST_ENCRYPTION_CONFIGS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListEncryptionConfigsRequest,
              ListEncryptionConfigsResponse,
              ListEncryptionConfigsPagedResponse>() {
            @Override
            public ApiFuture<ListEncryptionConfigsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListEncryptionConfigsRequest, ListEncryptionConfigsResponse> callable,
                ListEncryptionConfigsRequest request,
                ApiCallContext context,
                ApiFuture<ListEncryptionConfigsResponse> futureResponse) {
              PageContext<
                      ListEncryptionConfigsRequest, ListEncryptionConfigsResponse, EncryptionConfig>
                  pageContext =
                      PageContext.create(
                          callable, LIST_ENCRYPTION_CONFIGS_PAGE_STR_DESC, request, context);
              return ListEncryptionConfigsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  // Factory producing auto-paging responses for ListLocations.
  private static final PagedListResponseFactory<
          ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      LIST_LOCATIONS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
            @Override
            public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
                ListLocationsRequest request,
                ApiCallContext context,
                ApiFuture<ListLocationsResponse> futureResponse) {
              PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                  PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
              return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to createEncryptionConfig. */
  public UnaryCallSettings<CreateEncryptionConfigRequest, Operation>
      createEncryptionConfigSettings() {
    return createEncryptionConfigSettings;
  }

  /** Returns the object with the settings used for calls to createEncryptionConfig. */
  public OperationCallSettings<CreateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
      createEncryptionConfigOperationSettings() {
    return createEncryptionConfigOperationSettings;
  }

  /** Returns the object with the settings used for calls to updateEncryptionConfig. */
  public UnaryCallSettings<UpdateEncryptionConfigRequest, Operation>
      updateEncryptionConfigSettings() {
    return updateEncryptionConfigSettings;
  }

  /** Returns the object with the settings used for calls to updateEncryptionConfig. */
  public OperationCallSettings<UpdateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
      updateEncryptionConfigOperationSettings() {
    return updateEncryptionConfigOperationSettings;
  }

  /** Returns the object with the settings used for calls to deleteEncryptionConfig. */
  public UnaryCallSettings<DeleteEncryptionConfigRequest, Operation>
      deleteEncryptionConfigSettings() {
    return deleteEncryptionConfigSettings;
  }

  /** Returns the object with the settings used for calls to deleteEncryptionConfig. */
  public OperationCallSettings<DeleteEncryptionConfigRequest, Empty, OperationMetadata>
      deleteEncryptionConfigOperationSettings() {
    return deleteEncryptionConfigOperationSettings;
  }

  /** Returns the object with the settings used for calls to listEncryptionConfigs. */
  public PagedCallSettings<
          ListEncryptionConfigsRequest,
          ListEncryptionConfigsResponse,
          ListEncryptionConfigsPagedResponse>
      listEncryptionConfigsSettings() {
    return listEncryptionConfigsSettings;
  }

  /** Returns the object with the settings used for calls to getEncryptionConfig. */
  public UnaryCallSettings<GetEncryptionConfigRequest, EncryptionConfig>
      getEncryptionConfigSettings() {
    return getEncryptionConfigSettings;
  }

  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return listLocationsSettings;
  }

  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return getLocationSettings;
  }

  /**
   * Creates the transport-specific stub matching the configured channel provider: gRPC or
   * HTTP/JSON (REST). Any other transport name is rejected.
   *
   * @throws IOException if the stub cannot be created
   * @throws UnsupportedOperationException if the configured transport is neither gRPC nor HTTP/JSON
   */
  public CmekServiceStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(GrpcTransportChannel.getGrpcTransportName())) {
      return GrpcCmekServiceStub.create(this);
    }
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonCmekServiceStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "dataplex";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "dataplex.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "dataplex.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default TransportChannelProvider (gRPC) for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  /** Returns a builder for the gRPC client header provider (library/transport version tokens). */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(CmekServiceStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a builder for the HTTP/JSON client header provider. */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(CmekServiceStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns the default client header provider builder (delegates to the gRPC variant). */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return CmekServiceStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /**
   * Builds immutable settings from the given builder; each per-RPC builder is frozen via its own
   * {@code build()} call.
   */
  protected CmekServiceStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    createEncryptionConfigSettings = settingsBuilder.createEncryptionConfigSettings().build();
    createEncryptionConfigOperationSettings =
        settingsBuilder.createEncryptionConfigOperationSettings().build();
    updateEncryptionConfigSettings = settingsBuilder.updateEncryptionConfigSettings().build();
    updateEncryptionConfigOperationSettings =
        settingsBuilder.updateEncryptionConfigOperationSettings().build();
    deleteEncryptionConfigSettings = settingsBuilder.deleteEncryptionConfigSettings().build();
    deleteEncryptionConfigOperationSettings =
        settingsBuilder.deleteEncryptionConfigOperationSettings().build();
    listEncryptionConfigsSettings = settingsBuilder.listEncryptionConfigsSettings().build();
    getEncryptionConfigSettings = settingsBuilder.getEncryptionConfigSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }

  /** Builder for CmekServiceStubSettings. */
  public static class Builder extends StubSettings.Builder<CmekServiceStubSettings, Builder> {
    // All unary settings builders, collected so applyToAllUnaryMethods can update them in bulk.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<CreateEncryptionConfigRequest, Operation>
        createEncryptionConfigSettings;
    private final OperationCallSettings.Builder<
            CreateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
        createEncryptionConfigOperationSettings;
    private final UnaryCallSettings.Builder<UpdateEncryptionConfigRequest, Operation>
        updateEncryptionConfigSettings;
    private final OperationCallSettings.Builder<
            UpdateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
        updateEncryptionConfigOperationSettings;
    private final UnaryCallSettings.Builder<DeleteEncryptionConfigRequest, Operation>
        deleteEncryptionConfigSettings;
    private final OperationCallSettings.Builder<
            DeleteEncryptionConfigRequest, Empty, OperationMetadata>
        deleteEncryptionConfigOperationSettings;
    private final PagedCallSettings.Builder<
            ListEncryptionConfigsRequest,
            ListEncryptionConfigsResponse,
            ListEncryptionConfigsPagedResponse>
        listEncryptionConfigsSettings;
    private final UnaryCallSettings.Builder<GetEncryptionConfigRequest, EncryptionConfig>
        getEncryptionConfigSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;

    // Named retryable-code sets used by initDefaults; "no_retry_codes" means no retries.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named RetrySettings presets used by initDefaults.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
      definitions.put("no_retry_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);

      createEncryptionConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createEncryptionConfigOperationSettings = OperationCallSettings.newBuilder();
      updateEncryptionConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateEncryptionConfigOperationSettings = OperationCallSettings.newBuilder();
      deleteEncryptionConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteEncryptionConfigOperationSettings = OperationCallSettings.newBuilder();
      listEncryptionConfigsSettings =
          PagedCallSettings.newBuilder(LIST_ENCRYPTION_CONFIGS_PAGE_STR_FACT);
      getEncryptionConfigSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createEncryptionConfigSettings,
              updateEncryptionConfigSettings,
              deleteEncryptionConfigSettings,
              listEncryptionConfigsSettings,
              getEncryptionConfigSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }

    // Copy constructor used by toBuilder(): re-opens every per-RPC settings object for editing.
    protected Builder(CmekServiceStubSettings settings) {
      super(settings);

      createEncryptionConfigSettings = settings.createEncryptionConfigSettings.toBuilder();
      createEncryptionConfigOperationSettings =
          settings.createEncryptionConfigOperationSettings.toBuilder();
      updateEncryptionConfigSettings = settings.updateEncryptionConfigSettings.toBuilder();
      updateEncryptionConfigOperationSettings =
          settings.updateEncryptionConfigOperationSettings.toBuilder();
      deleteEncryptionConfigSettings = settings.deleteEncryptionConfigSettings.toBuilder();
      deleteEncryptionConfigOperationSettings =
          settings.deleteEncryptionConfigOperationSettings.toBuilder();
      listEncryptionConfigsSettings = settings.listEncryptionConfigsSettings.toBuilder();
      getEncryptionConfigSettings = settings.getEncryptionConfigSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();

      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              createEncryptionConfigSettings,
              updateEncryptionConfigSettings,
              deleteEncryptionConfigSettings,
              listEncryptionConfigsSettings,
              getEncryptionConfigSettings,
              listLocationsSettings,
              getLocationSettings);
    }

    // Default (gRPC) builder: wires in gRPC transport, ADC credentials, headers, and mTLS.
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Default REST builder: same as createDefault() but with the HTTP/JSON transport and headers.
    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    // Applies the service's default retry/polling configuration: no retries for every RPC,
    // and a shared polling schedule (5s initial, x1.5, 45s max, 300s total) for each LRO.
    private static Builder initDefaults(Builder builder) {
      builder
          .createEncryptionConfigSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .updateEncryptionConfigSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .deleteEncryptionConfigSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .listEncryptionConfigsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .getEncryptionConfigSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .createEncryptionConfigOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<CreateEncryptionConfigRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(EncryptionConfig.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      builder
          .updateEncryptionConfigOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<UpdateEncryptionConfigRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(EncryptionConfig.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      builder
          .deleteEncryptionConfigOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<DeleteEncryptionConfigRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Empty.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(OperationMetadata.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the list of unary method settings builders registered on this builder. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to createEncryptionConfig. */
    public UnaryCallSettings.Builder<CreateEncryptionConfigRequest, Operation>
        createEncryptionConfigSettings() {
      return createEncryptionConfigSettings;
    }

    /** Returns the builder for the settings used for calls to createEncryptionConfig. */
    public OperationCallSettings.Builder<
            CreateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
        createEncryptionConfigOperationSettings() {
      return createEncryptionConfigOperationSettings;
    }

    /** Returns the builder for the settings used for calls to updateEncryptionConfig. */
    public UnaryCallSettings.Builder<UpdateEncryptionConfigRequest, Operation>
        updateEncryptionConfigSettings() {
      return updateEncryptionConfigSettings;
    }

    /** Returns the builder for the settings used for calls to updateEncryptionConfig. */
    public OperationCallSettings.Builder<
            UpdateEncryptionConfigRequest, EncryptionConfig, OperationMetadata>
        updateEncryptionConfigOperationSettings() {
      return updateEncryptionConfigOperationSettings;
    }

    /** Returns the builder for the settings used for calls to deleteEncryptionConfig. */
    public UnaryCallSettings.Builder<DeleteEncryptionConfigRequest, Operation>
        deleteEncryptionConfigSettings() {
      return deleteEncryptionConfigSettings;
    }

    /** Returns the builder for the settings used for calls to deleteEncryptionConfig. */
    public OperationCallSettings.Builder<DeleteEncryptionConfigRequest, Empty, OperationMetadata>
        deleteEncryptionConfigOperationSettings() {
      return deleteEncryptionConfigOperationSettings;
    }

    /** Returns the builder for the settings used for calls to listEncryptionConfigs. */
    public PagedCallSettings.Builder<
            ListEncryptionConfigsRequest,
            ListEncryptionConfigsResponse,
            ListEncryptionConfigsPagedResponse>
        listEncryptionConfigsSettings() {
      return listEncryptionConfigsSettings;
    }

    /** Returns the builder for the settings used for calls to getEncryptionConfig. */
    public UnaryCallSettings.Builder<GetEncryptionConfigRequest, EncryptionConfig>
        getEncryptionConfigSettings() {
      return getEncryptionConfigSettings;
    }

    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }

    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }

    @Override
    public CmekServiceStubSettings build() throws IOException {
      return new CmekServiceStubSettings(this);
    }
  }
}
|
googleapis/sdk-platform-java | 37,058 | test/integration/goldens/library/src/com/google/cloud/example/library/v1/stub/HttpJsonLibraryServiceStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.example.library.v1.stub;
import static com.google.cloud.example.library.v1.LibraryServiceClient.ListBooksPagedResponse;
import static com.google.cloud.example.library.v1.LibraryServiceClient.ListShelvesPagedResponse;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.example.library.v1.Book;
import com.google.example.library.v1.CreateBookRequest;
import com.google.example.library.v1.CreateShelfRequest;
import com.google.example.library.v1.DeleteBookRequest;
import com.google.example.library.v1.DeleteShelfRequest;
import com.google.example.library.v1.GetBookRequest;
import com.google.example.library.v1.GetShelfRequest;
import com.google.example.library.v1.ListBooksRequest;
import com.google.example.library.v1.ListBooksResponse;
import com.google.example.library.v1.ListShelvesRequest;
import com.google.example.library.v1.ListShelvesResponse;
import com.google.example.library.v1.MergeShelvesRequest;
import com.google.example.library.v1.MoveBookRequest;
import com.google.example.library.v1.Shelf;
import com.google.example.library.v1.UpdateBookRequest;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the LibraryService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class HttpJsonLibraryServiceStub extends LibraryServiceStub {
private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder().build();
private static final ApiMethodDescriptor<CreateShelfRequest, Shelf> createShelfMethodDescriptor =
ApiMethodDescriptor.<CreateShelfRequest, Shelf>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/CreateShelf")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<CreateShelfRequest>newBuilder()
.setPath(
"/v1/shelves",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<CreateShelfRequest> serializer =
ProtoRestSerializer.create();
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CreateShelfRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create().toBody("shelf", request.getShelf(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Shelf>newBuilder()
.setDefaultInstance(Shelf.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<GetShelfRequest, Shelf> getShelfMethodDescriptor =
ApiMethodDescriptor.<GetShelfRequest, Shelf>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/GetShelf")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetShelfRequest>newBuilder()
.setPath(
"/v1/{name=shelves/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetShelfRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetShelfRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Shelf>newBuilder()
.setDefaultInstance(Shelf.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<ListShelvesRequest, ListShelvesResponse>
listShelvesMethodDescriptor =
ApiMethodDescriptor.<ListShelvesRequest, ListShelvesResponse>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/ListShelves")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ListShelvesRequest>newBuilder()
.setPath(
"/v1/shelves",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ListShelvesRequest> serializer =
ProtoRestSerializer.create();
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ListShelvesRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "pageSize", request.getPageSize());
serializer.putQueryParam(fields, "pageToken", request.getPageToken());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<ListShelvesResponse>newBuilder()
.setDefaultInstance(ListShelvesResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<DeleteShelfRequest, Empty> deleteShelfMethodDescriptor =
ApiMethodDescriptor.<DeleteShelfRequest, Empty>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/DeleteShelf")
.setHttpMethod("DELETE")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<DeleteShelfRequest>newBuilder()
.setPath(
"/v1/{name=shelves/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<DeleteShelfRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<DeleteShelfRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Empty>newBuilder()
.setDefaultInstance(Empty.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<MergeShelvesRequest, Shelf>
mergeShelvesMethodDescriptor =
ApiMethodDescriptor.<MergeShelvesRequest, Shelf>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/MergeShelves")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<MergeShelvesRequest>newBuilder()
.setPath(
"/v1/{name=shelves/*}:merge",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<MergeShelvesRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<MergeShelvesRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("*", request.toBuilder().clearName().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Shelf>newBuilder()
.setDefaultInstance(Shelf.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<CreateBookRequest, Book> createBookMethodDescriptor =
ApiMethodDescriptor.<CreateBookRequest, Book>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/CreateBook")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<CreateBookRequest>newBuilder()
.setPath(
"/v1/{parent=shelves/*}/books",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<CreateBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "parent", request.getParent());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<CreateBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create().toBody("book", request.getBook(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Book>newBuilder()
.setDefaultInstance(Book.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<GetBookRequest, Book> getBookMethodDescriptor =
ApiMethodDescriptor.<GetBookRequest, Book>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/GetBook")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<GetBookRequest>newBuilder()
.setPath(
"/v1/{name=shelves/*/books/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<GetBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<GetBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Book>newBuilder()
.setDefaultInstance(Book.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<ListBooksRequest, ListBooksResponse>
listBooksMethodDescriptor =
ApiMethodDescriptor.<ListBooksRequest, ListBooksResponse>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/ListBooks")
.setHttpMethod("GET")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<ListBooksRequest>newBuilder()
.setPath(
"/v1/{parent=shelves/*}/books",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<ListBooksRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "parent", request.getParent());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<ListBooksRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "pageSize", request.getPageSize());
serializer.putQueryParam(fields, "pageToken", request.getPageToken());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<ListBooksResponse>newBuilder()
.setDefaultInstance(ListBooksResponse.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<DeleteBookRequest, Empty> deleteBookMethodDescriptor =
ApiMethodDescriptor.<DeleteBookRequest, Empty>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/DeleteBook")
.setHttpMethod("DELETE")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<DeleteBookRequest>newBuilder()
.setPath(
"/v1/{name=shelves/*/books/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<DeleteBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<DeleteBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(request -> null)
.build())
.setResponseParser(
ProtoMessageResponseParser.<Empty>newBuilder()
.setDefaultInstance(Empty.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<UpdateBookRequest, Book> updateBookMethodDescriptor =
ApiMethodDescriptor.<UpdateBookRequest, Book>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/UpdateBook")
.setHttpMethod("PATCH")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<UpdateBookRequest>newBuilder()
.setPath(
"/v1/{book.name=shelves/*/books/*}",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<UpdateBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "book.name", request.getBook().getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<UpdateBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create().toBody("book", request.getBook(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Book>newBuilder()
.setDefaultInstance(Book.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private static final ApiMethodDescriptor<MoveBookRequest, Book> moveBookMethodDescriptor =
ApiMethodDescriptor.<MoveBookRequest, Book>newBuilder()
.setFullMethodName("google.example.library.v1.LibraryService/MoveBook")
.setHttpMethod("POST")
.setType(ApiMethodDescriptor.MethodType.UNARY)
.setRequestFormatter(
ProtoMessageRequestFormatter.<MoveBookRequest>newBuilder()
.setPath(
"/v1/{name=shelves/*/books/*}:move",
request -> {
Map<String, String> fields = new HashMap<>();
ProtoRestSerializer<MoveBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putPathParam(fields, "name", request.getName());
return fields;
})
.setQueryParamsExtractor(
request -> {
Map<String, List<String>> fields = new HashMap<>();
ProtoRestSerializer<MoveBookRequest> serializer =
ProtoRestSerializer.create();
serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
return fields;
})
.setRequestBodyExtractor(
request ->
ProtoRestSerializer.create()
.toBody("*", request.toBuilder().clearName().build(), true))
.build())
.setResponseParser(
ProtoMessageResponseParser.<Book>newBuilder()
.setDefaultInstance(Book.getDefaultInstance())
.setDefaultTypeRegistry(typeRegistry)
.build())
.build();
private final UnaryCallable<CreateShelfRequest, Shelf> createShelfCallable;
private final UnaryCallable<GetShelfRequest, Shelf> getShelfCallable;
private final UnaryCallable<ListShelvesRequest, ListShelvesResponse> listShelvesCallable;
private final UnaryCallable<ListShelvesRequest, ListShelvesPagedResponse>
listShelvesPagedCallable;
private final UnaryCallable<DeleteShelfRequest, Empty> deleteShelfCallable;
private final UnaryCallable<MergeShelvesRequest, Shelf> mergeShelvesCallable;
private final UnaryCallable<CreateBookRequest, Book> createBookCallable;
private final UnaryCallable<GetBookRequest, Book> getBookCallable;
private final UnaryCallable<ListBooksRequest, ListBooksResponse> listBooksCallable;
private final UnaryCallable<ListBooksRequest, ListBooksPagedResponse> listBooksPagedCallable;
private final UnaryCallable<DeleteBookRequest, Empty> deleteBookCallable;
private final UnaryCallable<UpdateBookRequest, Book> updateBookCallable;
private final UnaryCallable<MoveBookRequest, Book> moveBookCallable;
private final BackgroundResource backgroundResources;
private final HttpJsonStubCallableFactory callableFactory;
public static final HttpJsonLibraryServiceStub create(LibraryServiceStubSettings settings)
throws IOException {
return new HttpJsonLibraryServiceStub(settings, ClientContext.create(settings));
}
public static final HttpJsonLibraryServiceStub create(ClientContext clientContext)
throws IOException {
return new HttpJsonLibraryServiceStub(
LibraryServiceStubSettings.newHttpJsonBuilder().build(), clientContext);
}
public static final HttpJsonLibraryServiceStub create(
ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
return new HttpJsonLibraryServiceStub(
LibraryServiceStubSettings.newHttpJsonBuilder().build(), clientContext, callableFactory);
}
/**
* Constructs an instance of HttpJsonLibraryServiceStub, using the given settings. This is
* protected so that it is easy to make a subclass, but otherwise, the static factory methods
* should be preferred.
*/
protected HttpJsonLibraryServiceStub(
LibraryServiceStubSettings settings, ClientContext clientContext) throws IOException {
this(settings, clientContext, new HttpJsonLibraryServiceCallableFactory());
}
/**
* Constructs an instance of HttpJsonLibraryServiceStub, using the given settings. This is
* protected so that it is easy to make a subclass, but otherwise, the static factory methods
* should be preferred.
*/
protected HttpJsonLibraryServiceStub(
LibraryServiceStubSettings settings,
ClientContext clientContext,
HttpJsonStubCallableFactory callableFactory)
throws IOException {
this.callableFactory = callableFactory;
HttpJsonCallSettings<CreateShelfRequest, Shelf> createShelfTransportSettings =
HttpJsonCallSettings.<CreateShelfRequest, Shelf>newBuilder()
.setMethodDescriptor(createShelfMethodDescriptor)
.setTypeRegistry(typeRegistry)
.build();
HttpJsonCallSettings<GetShelfRequest, Shelf> getShelfTransportSettings =
HttpJsonCallSettings.<GetShelfRequest, Shelf>newBuilder()
.setMethodDescriptor(getShelfMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<ListShelvesRequest, ListShelvesResponse> listShelvesTransportSettings =
HttpJsonCallSettings.<ListShelvesRequest, ListShelvesResponse>newBuilder()
.setMethodDescriptor(listShelvesMethodDescriptor)
.setTypeRegistry(typeRegistry)
.build();
HttpJsonCallSettings<DeleteShelfRequest, Empty> deleteShelfTransportSettings =
HttpJsonCallSettings.<DeleteShelfRequest, Empty>newBuilder()
.setMethodDescriptor(deleteShelfMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<MergeShelvesRequest, Shelf> mergeShelvesTransportSettings =
HttpJsonCallSettings.<MergeShelvesRequest, Shelf>newBuilder()
.setMethodDescriptor(mergeShelvesMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<CreateBookRequest, Book> createBookTransportSettings =
HttpJsonCallSettings.<CreateBookRequest, Book>newBuilder()
.setMethodDescriptor(createBookMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<GetBookRequest, Book> getBookTransportSettings =
HttpJsonCallSettings.<GetBookRequest, Book>newBuilder()
.setMethodDescriptor(getBookMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<ListBooksRequest, ListBooksResponse> listBooksTransportSettings =
HttpJsonCallSettings.<ListBooksRequest, ListBooksResponse>newBuilder()
.setMethodDescriptor(listBooksMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("parent", String.valueOf(request.getParent()));
return builder.build();
})
.build();
HttpJsonCallSettings<DeleteBookRequest, Empty> deleteBookTransportSettings =
HttpJsonCallSettings.<DeleteBookRequest, Empty>newBuilder()
.setMethodDescriptor(deleteBookMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<UpdateBookRequest, Book> updateBookTransportSettings =
HttpJsonCallSettings.<UpdateBookRequest, Book>newBuilder()
.setMethodDescriptor(updateBookMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("book.name", String.valueOf(request.getBook().getName()));
return builder.build();
})
.build();
HttpJsonCallSettings<MoveBookRequest, Book> moveBookTransportSettings =
HttpJsonCallSettings.<MoveBookRequest, Book>newBuilder()
.setMethodDescriptor(moveBookMethodDescriptor)
.setTypeRegistry(typeRegistry)
.setParamsExtractor(
request -> {
RequestParamsBuilder builder = RequestParamsBuilder.create();
builder.add("name", String.valueOf(request.getName()));
return builder.build();
})
.build();
this.createShelfCallable =
callableFactory.createUnaryCallable(
createShelfTransportSettings, settings.createShelfSettings(), clientContext);
this.getShelfCallable =
callableFactory.createUnaryCallable(
getShelfTransportSettings, settings.getShelfSettings(), clientContext);
this.listShelvesCallable =
callableFactory.createUnaryCallable(
listShelvesTransportSettings, settings.listShelvesSettings(), clientContext);
this.listShelvesPagedCallable =
callableFactory.createPagedCallable(
listShelvesTransportSettings, settings.listShelvesSettings(), clientContext);
this.deleteShelfCallable =
callableFactory.createUnaryCallable(
deleteShelfTransportSettings, settings.deleteShelfSettings(), clientContext);
this.mergeShelvesCallable =
callableFactory.createUnaryCallable(
mergeShelvesTransportSettings, settings.mergeShelvesSettings(), clientContext);
this.createBookCallable =
callableFactory.createUnaryCallable(
createBookTransportSettings, settings.createBookSettings(), clientContext);
this.getBookCallable =
callableFactory.createUnaryCallable(
getBookTransportSettings, settings.getBookSettings(), clientContext);
this.listBooksCallable =
callableFactory.createUnaryCallable(
listBooksTransportSettings, settings.listBooksSettings(), clientContext);
this.listBooksPagedCallable =
callableFactory.createPagedCallable(
listBooksTransportSettings, settings.listBooksSettings(), clientContext);
this.deleteBookCallable =
callableFactory.createUnaryCallable(
deleteBookTransportSettings, settings.deleteBookSettings(), clientContext);
this.updateBookCallable =
callableFactory.createUnaryCallable(
updateBookTransportSettings, settings.updateBookSettings(), clientContext);
this.moveBookCallable =
callableFactory.createUnaryCallable(
moveBookTransportSettings, settings.moveBookSettings(), clientContext);
this.backgroundResources =
new BackgroundResourceAggregation(clientContext.getBackgroundResources());
}
@InternalApi
public static List<ApiMethodDescriptor> getMethodDescriptors() {
List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
methodDescriptors.add(createShelfMethodDescriptor);
methodDescriptors.add(getShelfMethodDescriptor);
methodDescriptors.add(listShelvesMethodDescriptor);
methodDescriptors.add(deleteShelfMethodDescriptor);
methodDescriptors.add(mergeShelvesMethodDescriptor);
methodDescriptors.add(createBookMethodDescriptor);
methodDescriptors.add(getBookMethodDescriptor);
methodDescriptors.add(listBooksMethodDescriptor);
methodDescriptors.add(deleteBookMethodDescriptor);
methodDescriptors.add(updateBookMethodDescriptor);
methodDescriptors.add(moveBookMethodDescriptor);
return methodDescriptors;
}
@Override
public UnaryCallable<CreateShelfRequest, Shelf> createShelfCallable() {
return createShelfCallable;
}
@Override
public UnaryCallable<GetShelfRequest, Shelf> getShelfCallable() {
return getShelfCallable;
}
@Override
public UnaryCallable<ListShelvesRequest, ListShelvesResponse> listShelvesCallable() {
return listShelvesCallable;
}
@Override
public UnaryCallable<ListShelvesRequest, ListShelvesPagedResponse> listShelvesPagedCallable() {
return listShelvesPagedCallable;
}
@Override
public UnaryCallable<DeleteShelfRequest, Empty> deleteShelfCallable() {
return deleteShelfCallable;
}
@Override
public UnaryCallable<MergeShelvesRequest, Shelf> mergeShelvesCallable() {
return mergeShelvesCallable;
}
@Override
public UnaryCallable<CreateBookRequest, Book> createBookCallable() {
return createBookCallable;
}
@Override
public UnaryCallable<GetBookRequest, Book> getBookCallable() {
return getBookCallable;
}
@Override
public UnaryCallable<ListBooksRequest, ListBooksResponse> listBooksCallable() {
return listBooksCallable;
}
@Override
public UnaryCallable<ListBooksRequest, ListBooksPagedResponse> listBooksPagedCallable() {
return listBooksPagedCallable;
}
@Override
public UnaryCallable<DeleteBookRequest, Empty> deleteBookCallable() {
return deleteBookCallable;
}
@Override
public UnaryCallable<UpdateBookRequest, Book> updateBookCallable() {
return updateBookCallable;
}
@Override
public UnaryCallable<MoveBookRequest, Book> moveBookCallable() {
return moveBookCallable;
}
@Override
public final void close() {
try {
backgroundResources.close();
} catch (RuntimeException e) {
throw e;
} catch (Exception e) {
throw new IllegalStateException("Failed to close resource", e);
}
}
@Override
public void shutdown() {
backgroundResources.shutdown();
}
@Override
public boolean isShutdown() {
return backgroundResources.isShutdown();
}
@Override
public boolean isTerminated() {
return backgroundResources.isTerminated();
}
@Override
public void shutdownNow() {
backgroundResources.shutdownNow();
}
@Override
public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
return backgroundResources.awaitTermination(duration, unit);
}
}
|
apache/metron | 36,741 | metron-platform/metron-management/src/main/java/org/apache/metron/management/KafkaFunctions.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.management;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ClassUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.metron.common.system.Clock;
import org.apache.metron.stellar.common.LambdaExpression;
import org.apache.metron.stellar.common.utils.ConversionUtils;
import org.apache.metron.stellar.common.utils.JSONUtils;
import org.apache.metron.stellar.dsl.Context;
import org.apache.metron.stellar.dsl.ParseException;
import org.apache.metron.stellar.dsl.Stellar;
import org.apache.metron.stellar.dsl.StellarFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static java.lang.String.format;
import static org.apache.metron.stellar.dsl.Context.Capabilities.GLOBAL_CONFIG;
import static org.apache.metron.management.Functions.getArg;
/**
* Defines the following Kafka-related functions available in Stellar.
*
* KAFKA_GET
* KAFKA_PUT
* KAFKA_TAIL
* KAFKA_FIND
* KAFKA_PROPS
*/
public class KafkaFunctions {
private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
/**
* The key for the property that defines the maximum amount of time
* to wait to receive messages.
*/
public static final String POLL_TIMEOUT_PROPERTY = "stellar.kafka.poll.timeout";
/**
* How long to wait on each poll request in milliseconds.
*
* <p>One each function call, there will likely be multiple poll requests, each
* waiting this period of time.
*/
private static final int DEFAULT_POLL_TIMEOUT = 500;
/**
* The key for the property that defines the maximum amount of time
* to wait to receive messages in milliseconds.
*/
public static final String MAX_WAIT_PROPERTY = "stellar.kafka.max.wait.millis";
/**
* The default max wait time in milliseconds.
*/
public static final int DEFAULT_MAX_WAIT = 5000;
/**
* The key for the global property that defines how a message is returned
* from the set of KAFKA functions.
*
* <p>simple - The result contains only the message value as a string.
* <p>rich - The result contains the message value, topic, partition, and offset.
*/
public static final String MESSAGE_VIEW_PROPERTY = "stellar.kafka.message.view";
/**
* An acceptable value for the 'stellar.kafka.message.view' property. The result
* provided will contain only the message value as a string.
*/
public static final String MESSAGE_VIEW_SIMPLE = "simple";
/**
* An acceptable value for the 'stellar.kafka.message.view' property.
*
* <p>Provides a view of each message with more detailed metadata beyond just the
* message value. The result provided will contain the message value, topic, partition,
* and offset.
*/
public static final String MESSAGE_VIEW_RICH = "rich";
/**
* The default set of Kafka properties.
*/
private static Properties defaultProperties = defaultKafkaProperties();
/**
* A clock to tell time.
*
* Allows any functions that depend on the system clock to be more readily tested.
*/
protected static Clock clock = new Clock();
/**
* KAFKA_GET
*
* <p>Retrieves messages from a Kafka topic. Subsequent calls will continue retrieving messages
* sequentially from the original offset.
*
* <p>Example: Retrieve one message from a topic.
* <pre>
* {@code
* KAFKA_GET('topic')
* }
* </pre>
*
* <p>Example: Retrieve 10 messages from a topic.
* <pre>
* {@code
* KAFKA_GET('topic', 10)
* }
* </pre>
*
* <p>Example: Retrieve the first message from a topic. This must be the first retrieval
* from the topic, otherwise the messages will be retrieved starting from the
* previously stored consumer offset.
* <pre>
* {@code
* KAFKA_GET('topic', 1, { "auto.offset.reset": "earliest" })
* }
* </pre>
*
* <p>By default, only the message value is returned. By setting the global property
* 'stellar.kafka.message.view' = 'rich' the function will return additional Kafka metadata
* including the topic, partition, offset, key, and timestamp contained in a map. Setting
* this property value to 'simple' or simply not setting the property value, will result
* in the default view behavior.
*/
@Stellar(
        namespace = "KAFKA",
        name = "GET",
        // note the trailing space before the concatenation; without it the rendered
        // description reads "willcontinue"
        description = "Retrieves messages from a Kafka topic. Subsequent calls will " +
                "continue retrieving messages sequentially from the original offset.",
        params = {
                "topic - The name of the Kafka topic",
                "count - The number of Kafka messages to retrieve",
                "config - Optional map of key/values that override any global properties."
        },
        returns = "The messages as a list of strings"
)
public static class KafkaGet implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // required - name of the topic to retrieve messages from
    String topic = getArg("topic", 0, String.class, args);

    // optional - how many messages should be retrieved?
    int count = 1;
    if(args.size() > 1) {
      count = getArg("count", 1, Integer.class, args);
    }

    // optional - property overrides provided by the user
    Map<String, String> overrides = new HashMap<>();
    if(args.size() > 2) {
      overrides = getArg("overrides", 2, Map.class, args);
    }

    // build the properties for kafka; never poll more records per round-trip than requested
    Properties properties = buildKafkaProperties(overrides, context);
    properties.put("max.poll.records", count);
    return getMessages(topic, count, properties);
  }

  /**
   * Gets messages from a Kafka topic.
   *
   * @param topic The Kafka topic.
   * @param count The maximum number of messages to get.
   * @param properties The function properties.
   * @return A list of rendered messages; at most {@code count}, possibly fewer
   *         if the max wait time is exceeded first.
   */
  private Object getMessages(String topic, int count, Properties properties) {
    int maxWait = getMaxWait(properties);
    int pollTimeout = getPollTimeout(properties);
    List<Object> messages = new ArrayList<>();

    // read some messages
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
      manualPartitionAssignment(topic, consumer);

      // continue until we have enough messages or exceeded the max wait time
      long wait = 0L;
      final long start = clock.currentTimeMillis();
      while(messages.size() < count && wait < maxWait) {
        for(ConsumerRecord<String, String> record: consumer.poll(pollTimeout)) {
          Object viewOfMessage = render(record, properties);
          messages.add(viewOfMessage);
        }

        // how long have we waited?
        wait = clock.currentTimeMillis() - start;
        // commit so subsequent KAFKA_GET calls resume from where this one stopped
        consumer.commitSync();
        LOG.debug("KAFKA_GET polled for messages; topic={}, count={}, waitTime={} ms",
                topic, messages.size(), wait);
      }
    }
    return messages;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_TAIL
 *
 * <p>Tails messages from a Kafka topic, always starting from the most
 * recently received message rather than a stored consumer offset.
 *
 * <p>Example: Retrieve the latest message from a topic.
 * <pre>
 * {@code
 * KAFKA_TAIL('topic')
 * }
 * </pre>
 *
 * <p>Example: Retrieve 10 messages from a topic starting with the latest.
 * <pre>
 * {@code
 * KAFKA_TAIL('topic', 10)
 * }
 * </pre>
 *
 * <p>Only the message value is returned by default. If the global property
 * 'stellar.kafka.message.view' is set to 'rich', each message is instead a map
 * carrying the topic, partition, offset, key, and timestamp. A value of
 * 'simple', or no value at all, selects the default behavior.
 */
@Stellar(
        namespace = "KAFKA",
        name = "TAIL",
        description = "Tails messages from a Kafka topic always starting with the most recently received message.",
        params = {
                "topic - The name of the Kafka topic",
                "count - The number of Kafka messages to retrieve",
                "config - Optional map of key/values that override any global properties."
        },
        returns = "The messages as a list of strings"
)
public static class KafkaTail implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // arg 0 (required): the topic to tail
    String topic = getArg("topic", 0, String.class, args);

    // arg 1 (optional): number of messages wanted; defaults to 1
    int count = (args.size() > 1) ? getArg("count", 1, Integer.class, args) : 1;

    // arg 2 (optional): user-supplied property overrides
    Map<String, String> overrides = (args.size() > 2)
            ? getArg("overrides", 2, Map.class, args)
            : new HashMap<>();

    Properties properties = buildKafkaProperties(overrides, context);
    properties.put("max.poll.records", count);
    return tailMessages(topic, count, properties);
  }

  /**
   * Gets messages from the tail end of a Kafka topic.
   *
   * @param topic The name of the kafka topic.
   * @param count The maximum number of messages to get.
   * @param properties The function configuration properties.
   * @return A list of messages from the tail end of a Kafka topic.
   */
  private Object tailMessages(String topic, int count, Properties properties) {
    final int pollTimeout = getPollTimeout(properties);
    final int maxWait = getMaxWait(properties);
    List<Object> results = new ArrayList<>();

    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
      // jump past everything already in the topic so only new messages are seen
      Set<TopicPartition> assigned = manualPartitionAssignment(topic, consumer);
      consumer.seekToEnd(assigned);

      // keep polling until enough messages arrive or the wait budget runs out
      final long startedAt = clock.currentTimeMillis();
      long elapsed = 0L;
      while(results.size() < count && elapsed < maxWait) {
        for(ConsumerRecord<String, String> entry : consumer.poll(pollTimeout)) {
          results.add(render(entry, properties));
        }
        elapsed = clock.currentTimeMillis() - startedAt;
        consumer.commitSync();
        LOG.debug("KAFKA_TAIL polled for messages; topic={}, count={}, waitTime={} ms",
                topic, results.size(), elapsed);
      }
    }
    return results;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_PUT
 *
 * <p>Sends messages to a Kafka topic.
 *
 * <p>Example: Put two messages on the topic 'topic'.
 * <pre>
 * {@code
 * KAFKA_PUT('topic', ["message1", "message2"])
 * }
 * </pre>
 *
 * <p>Example: Put a message on a topic and also define an alternative Kafka broker.
 * <pre>
 * {@code
 * KAFKA_PUT('topic', ["message1"], { "bootstrap.servers": "kafka-broker-1:6667" })
 * }
 * </pre>
 *
 */
@Stellar(
        namespace = "KAFKA",
        name = "PUT",
        description = "Sends messages to a Kafka topic. ",
        params = {
                "topic - The name of the Kafka topic.",
                "messages - A list of messages to write.",
                "config - An optional map of key/values that override any global properties."
        },
        returns = " "
)
public static class KafkaPut implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // required - the topic to write to
    String topic = ConversionUtils.convert(args.get(0), String.class);

    // required - the message(s) to write; accepts either a single string or a list
    List<String> messages;
    if(args.get(1) instanceof String) {
      // a single message needs sent
      String msg = getArg("message(s)", 1, String.class, args);
      messages = Collections.singletonList(msg);
    } else {
      // a list of messages; all need sent
      messages = getArg("message(s)", 1, List.class, args);
    }

    // optional - are there any property overrides?
    Map<String, String> overrides = new HashMap<>();
    if(args.size() > 2) {
      overrides = getArg("overrides", 2, Map.class, args);
    }

    // send the messages
    Properties properties = buildKafkaProperties(overrides, context);
    List<RecordMetadata> records = putMessages(topic, messages, properties);

    // render a view of the messages that were written for the user
    Object view = render(records, properties);
    return view;
  }

  /**
   * Render a view of the {@link RecordMetadata} that resulted from writing
   * messages to Kafka.
   *
   * @param records The record metadata; may contain null entries for sends that
   *                failed or timed-out (see {@link #waitForResponse}).
   * @param properties The properties.
   * @return A list of metadata maps (rich view) or a count of records (simple view).
   */
  private Object render(List<RecordMetadata> records, Properties properties) {
    Object view;
    if(MESSAGE_VIEW_RICH.equals(getMessageView(properties))) {
      // build a 'rich' view of the messages that were written
      List<Object> responses = new ArrayList<>();
      for(RecordMetadata record: records) {
        // a failed or timed-out send produced no metadata; skip it instead of NPE-ing
        if(record == null) {
          continue;
        }
        // render the 'rich' view of the record
        Map<String, Object> richView = new HashMap<>();
        richView.put("topic", record.topic());
        richView.put("partition", record.partition());
        richView.put("offset", record.offset());
        richView.put("timestamp", record.timestamp());
        responses.add(richView);
      }
      // the rich view is a list of maps containing metadata about how each message was written
      view = responses;
    } else {
      // otherwise, the view is simply a count of the number of messages written
      view = CollectionUtils.size(records);
    }
    return view;
  }

  /**
   * Put messages to a Kafka topic.
   *
   * <p>All sends are submitted asynchronously, then each is awaited in turn.
   *
   * @param topic The topic to send messages to.
   * @param messages The messages to send.
   * @param properties The properties to use with Kafka.
   * @return Metadata about all the records written to Kafka; entries may be
   *         null for sends that failed or timed-out.
   */
  private List<RecordMetadata> putMessages(String topic, List<String> messages, Properties properties) {
    LOG.debug("KAFKA_PUT sending messages; topic={}, count={}", topic, messages.size());

    List<RecordMetadata> records = new ArrayList<>();
    try (KafkaProducer<String, String> producer = new KafkaProducer<>(properties)) {
      // submit all sends first so the producer can batch them
      List<Future<RecordMetadata>> futures = new ArrayList<>();
      for(String msg : messages) {
        Future<RecordMetadata> future = producer.send(new ProducerRecord<>(topic, msg));
        futures.add(future);
      }

      // wait for the sends to complete
      for(Future<RecordMetadata> future : futures) {
        RecordMetadata record = waitForResponse(future, properties);
        records.add(record);
      }
      producer.flush();
    }
    return records;
  }

  /**
   * Wait for response to the message being sent.
   *
   * @param future The future for the message being sent.
   * @param properties The configuration properties.
   * @return Metadata about the record that was written to Kafka; null if the
   *         send failed, timed-out, or was interrupted.
   */
  private RecordMetadata waitForResponse(Future<RecordMetadata> future, Properties properties) {
    RecordMetadata record = null;
    int maxWait = getMaxWait(properties);
    try {
      // wait for the record and then render it for the user
      record = future.get(maxWait, TimeUnit.MILLISECONDS);
      LOG.debug("KAFKA_PUT message sent; topic={}, partition={}, offset={}",
              record.topic(), record.partition(), record.offset());
    } catch(InterruptedException e) {
      // restore the interrupt status so callers further up the stack can observe it
      Thread.currentThread().interrupt();
      LOG.error("KAFKA_PUT message send failure", e);
    } catch(TimeoutException | ExecutionException e) {
      LOG.error("KAFKA_PUT message send failure", e);
    }
    return record;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_PROPS
 *
 * Reports the effective Kafka properties used by the other KAFKA_* functions
 * (KAFKA_GET, KAFKA_PUT, ...). The result merges the built-in defaults, the
 * global configuration, and any caller-supplied overrides, in that order.
 *
 * Example: Retrieve the current Kafka properties.
 * KAFKA_PROPS()
 *
 * Example: Retrieve the current Kafka properties taking into account a set of overrides.
 * KAFKA_PROPS({ "max.poll.records": 1 })
 */
@Stellar(
        namespace = "KAFKA",
        name = "PROPS",
        description = "Retrieves the Kafka properties that are used by other KAFKA_* functions " +
                "like KAFKA_GET and KAFKA_PUT. The Kafka properties are compiled from a " +
                "set of default properties, the global properties, and any overrides.",
        params = { "config - An optional map of key/values that override any global properties." },
        returns = " "
)
public static class KafkaProps implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // arg 0 (optional): user-supplied overrides; defaults to none
    Map<String, String> overrides = args.isEmpty()
            ? new HashMap<>()
            : getArg("overrides", 0, Map.class, args);
    return buildKafkaProperties(overrides, context);
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_FIND
 *
 * <p>Finds messages that satisfy a given filter expression. Each call seeks to the
 * latest offset first, then scans forward, so only messages arriving after the call
 * begins are considered (matches the behavior of {@code seekToEnd} below).
 *
 * <p>Example: Retrieve a 'bro' message.
 * <pre>
 * {@code
 * KAFKA_FIND('topic', m -> MAP_GET('source.type', m) == 'bro')
 * }
 * </pre>
 *
 * <p>Example: Find 10 messages that contain geo-location data.
 * <pre>
 * {@code
 * KAFKA_FIND('topic', m -> MAP_EXISTS('geo', m), 10)
 * }
 * </pre>
 *
 * <p>By default, only the message value is returned. By setting the global property
 * 'stellar.kafka.message.view' = 'rich' the function will return additional Kafka metadata
 * including the topic, partition, offset, key, and timestamp contained in a map. Setting
 * this property value to 'simple' or simply not setting the property value, will result
 * in the default view behavior.
 */
@Stellar(
        namespace = "KAFKA",
        name = "FIND",
        description = "Find messages that satisfy a given filter expression. Messages are filtered starting from " +
                "the latest offset.",
        params = {
                "topic - The name of the Kafka topic",
                "filter - A lambda expression that filters messages. Messages are presented as a map of fields to the expression.",
                "count - The number of Kafka messages to retrieve",
                "config - Optional map of key/values that override any global properties."
        },
        returns = "The messages as a list of strings"
)
public static class KafkaFind implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // required - name of the topic to retrieve messages from
    String topic = getArg("topic", 0, String.class, args);

    // required - a lambda which filters the messages
    LambdaExpression filter = getArg("filter", 1, LambdaExpression.class, args);

    // optional - how many messages should be retrieved?
    int count = 1;
    if(args.size() > 2) {
      count = getArg("count", 2, Integer.class, args);
    }

    // optional - property overrides provided by the user
    Map<String, String> overrides = new HashMap<>();
    if(args.size() > 3) {
      overrides = getArg("overrides", 3, Map.class, args);
    }

    Properties properties = buildKafkaProperties(overrides, context);
    // over-fetch per poll since many polled messages may not satisfy the filter
    properties.put("max.poll.records", 10 * count);
    return findMessages(topic, filter, count, properties);
  }

  /**
   * Find messages in Kafka that satisfy a filter expression.
   *
   * @param topic The kafka topic.
   * @param filter The filter expression.
   * @param count The maximum number of messages to find.
   * @param properties Function configuration values.
   * @return A list of messages that satisfy the filter expression; at most
   *         {@code count}, possibly fewer if the max wait time is exceeded.
   */
  private List<Object> findMessages(String topic, LambdaExpression filter, int count, Properties properties) {
    final int pollTimeout = getPollTimeout(properties);
    final int maxWait = getMaxWait(properties);
    List<Object> messages = new ArrayList<>();
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
      // seek to the end of all topic/partitions; only newly arriving messages are scanned
      Set<TopicPartition> partitions = manualPartitionAssignment(topic, consumer);
      consumer.seekToEnd(partitions);

      // continue until we have enough messages or exceeded the max wait time
      long wait = 0L;
      final long start = clock.currentTimeMillis();
      while(messages.size() < count && wait < maxWait) {

        // poll kafka for messages
        ConsumerRecords<String, String> records = consumer.poll(pollTimeout);
        for(ConsumerRecord<String, String> record : records) {

          // only keep the message if the filter expression is satisfied
          if(isSatisfied(filter, record.value())) {
            Object view = render(record, properties);
            messages.add(view);

            // do we have enough messages already? (breaks the inner loop only;
            // the outer while-condition stops the scan)
            if(messages.size() >= count) {
              break;
            }
          }
        }

        // how long have we waited?
        wait = clock.currentTimeMillis() - start;
        consumer.commitSync();
        LOG.debug("KAFKA_FIND polled for messages; topic={}, count={}, waitTime={} ms",
                topic, messages.size(), wait);
      }
    }
    return messages;
  }

  /**
   * Executes a given expression on a message.
   *
   * @param expr The filter expression to execute.
   * @param message The message that the expression is executed on; expected to be JSON.
   * @return Returns true, only if the expression returns true. If the expression
   * returns false, returns a non-boolean, or the message cannot be parsed as JSON,
   * false is returned.
   */
  public boolean isSatisfied(LambdaExpression expr, String message) {
    boolean result = false;
    Map<String, Object> messageAsMap;
    try {
      // transform the message to a map of fields
      messageAsMap = JSONUtils.INSTANCE.load(message, JSONUtils.MAP_SUPPLIER);

      // apply the filter expression
      Object out = expr.apply(Collections.singletonList(messageAsMap));
      if(out instanceof Boolean) {
        result = (Boolean) out;
      } else {
        LOG.error("Expected boolean from filter expression, got {}", ClassUtils.getShortClassName(out, "null"));
      }
    } catch(IOException e) {
      LOG.error("Unable to parse message", e);
    }
    return result;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * KAFKA_SEEK
 *
 * <p>Seeks to a specific offset and returns the message.
 *
 * <p>Example: Find the message in 'topic', partition 1, offset 1001.
 * <pre>
 * {@code
 * KAFKA_SEEK('topic', 1, 1001)
 * }
 * </pre>
 *
 * <p>By default, only the message value is returned. By setting the global property
 * 'stellar.kafka.message.view' = 'rich' the function will return additional Kafka metadata
 * including the topic, partition, offset, key, and timestamp contained in a map. Setting
 * this property value to 'simple' or simply not setting the property value, will result
 * in the default view behavior.
 */
@Stellar(
        namespace = "KAFKA",
        name = "SEEK",
        description = "Seeks to an offset within a topic and returns the message.",
        params = {
                "topic - The name of the Kafka topic",
                "partition - The partition identifier; starts at 0.",
                "offset - The offset within the partition; starts at 0.",
                "config - Optional map of key/values that override any global properties."
        },
        returns = "The message at the given offset, if the offset exists. Otherwise, returns null."
)
public static class KafkaSeek implements StellarFunction {

  @Override
  public Object apply(List<Object> args, Context context) throws ParseException {
    // required - the topic, partition, and offset are all required
    String topic = getArg("topic", 0, String.class, args);
    int partition = getArg("partition", 1, Integer.class, args);
    int offset = getArg("offset", 2, Integer.class, args);

    // optional - property overrides provided by the user
    Map<String, String> overrides = new HashMap<>();
    if(args.size() > 3) {
      overrides = getArg("overrides", 3, Map.class, args);
    }

    Properties properties = buildKafkaProperties(overrides, context);
    return seek(topic, partition, offset, properties);
  }

  /**
   * Seeks to a specific partition/offset within a topic and returns the message
   * found there, if one exists.
   *
   * @param topic The kafka topic.
   * @param partition The partition identifier.
   * @param offset The offset within the given partition.
   * @param properties Function configuration values.
   * @return The rendered message at the given partition/offset, or null if it
   *         was not found within the max wait time.
   */
  private Object seek(String topic, int partition, int offset, Properties properties) {
    final int pollTimeout = getPollTimeout(properties);
    final int maxWait = getMaxWait(properties);
    Object message = null;
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {

      // continue until we have the message or exceeded the max wait time
      long wait = 0L;
      final long start = clock.currentTimeMillis();
      while(message == null && wait < maxWait) {

        // seek to the offset
        TopicPartition topar = new TopicPartition(topic, partition);
        consumer.assign(Collections.singletonList(topar));
        consumer.seek(topar, offset);

        // poll kafka for messages
        for(ConsumerRecord<String, String> record : consumer.poll(pollTimeout)) {

          // kafka will attempt to be helpful and return a message, even if the actual offset does not exist
          if(record.offset() == offset && record.partition() == partition) {
            LOG.debug("KAFKA_SEEK found message; topic={}, partition={}, offset={}", topic, partition, offset);
            message = render(record, properties);
          }
        }

        // how long have we waited?
        wait = clock.currentTimeMillis() - start;
        if(LOG.isDebugEnabled() && message == null) {
          LOG.debug("KAFKA_SEEK no message yet; topic={}, partition={}, offset={}, waitTime={} ms",
                  topic, partition, offset, wait);
        }
      }
    }
    return message;
  }

  @Override
  public void initialize(Context context) {
    // no initialization required
  }

  @Override
  public boolean isInitialized() {
    // no initialization required
    return true;
  }
}
/**
 * Renders a Kafka record into the view the user has asked for.
 *
 * <p>The "stellar.kafka.message.view" property selects between the simple view
 * (just the message value) and the rich view (a map that also carries the
 * topic, partition, offset, timestamp, and key).
 *
 * @param record The Kafka record to render.
 * @param properties The properties which allows a user to customize the rendered view of a record.
 * @return The message value, or a map of value plus metadata for the rich view.
 */
private static Object render(ConsumerRecord<String, String> record, Properties properties) {
  LOG.debug("Render message; topic={}, partition={}, offset={}",
          record.topic(), record.partition(), record.offset());

  // the simple view is the default: nothing but the message value
  if(!MESSAGE_VIEW_RICH.equals(getMessageView(properties))) {
    return record.value();
  }

  // rich view: the value plus the record's Kafka metadata
  Map<String, Object> view = new HashMap<>();
  view.put("value", record.value());
  view.put("topic", record.topic());
  view.put("partition", record.partition());
  view.put("offset", record.offset());
  view.put("timestamp", record.timestamp());
  view.put("key", record.key());
  return view;
}
/**
 * Manually assigns all partitions in a topic to a consumer.
 *
 * @param topic The topic whose partitions will be assigned.
 * @param consumer The consumer to assign partitions to.
 * @return A set of topic-partitions that were manually assigned to the consumer.
 * @throws IllegalStateException If the topic has no partitions, including the
 *         case where the topic does not exist.
 */
private static Set<TopicPartition> manualPartitionAssignment(String topic, KafkaConsumer<String, String> consumer) {
  // find all partitions for the topic; partitionsFor may return null when the
  // topic does not exist, which must not escape as a NullPointerException
  Set<TopicPartition> partitions = new HashSet<>();
  List<PartitionInfo> partitionInfo = consumer.partitionsFor(topic);
  if(partitionInfo != null) {
    for(PartitionInfo partition : partitionInfo) {
      partitions.add(new TopicPartition(topic, partition.partition()));
    }
  }
  if(partitions.isEmpty()) {
    throw new IllegalStateException(format("No partitions available for consumer assignment; topic=%s", topic));
  }

  // manually assign this consumer to each partition in the topic
  consumer.assign(partitions);
  return partitions;
}
/**
 * Assembles the set of Properties required by the Kafka client.
 *
 * Three layers are merged, each overriding the one before it: the built-in
 * defaults, then the global configuration, then any user-supplied overrides.
 *
 * @param overrides Property overrides provided by the user.
 * @param context The Stellar context.
 */
private static Properties buildKafkaProperties(Map<String, String> overrides, Context context) {
  // layer 1: the minimal defaults
  Properties properties = new Properties();
  properties.putAll(defaultProperties);

  // layer 2: the global configuration, when one is available
  context.getCapability(GLOBAL_CONFIG, false)
          .ifPresent(global -> properties.putAll((Map<String, Object>) global));

  // layer 3: user-defined overrides win over everything else
  properties.putAll(overrides);
  return properties;
}
/**
 * Return the max wait time setting.
 *
 * <p>The maximum time, in milliseconds, that the KAFKA_* functions will wait
 * for messages before giving up.
 *
 * @param properties The function configuration properties.
 * @return The max wait time in milliseconds; {@code DEFAULT_MAX_WAIT} when
 *         the property is not set.
 */
private static int getMaxWait(Properties properties) {
  int maxWait = DEFAULT_MAX_WAIT;
  Object value = properties.get(MAX_WAIT_PROPERTY);
  if(value != null) {
    maxWait = ConversionUtils.convert(value, Integer.class);
  }
  return maxWait;
}
/**
 * Returns the poll timeout setting.
 *
 * <p>The maximum amount of time waited each time that Kafka is polled
 * for messages.
 *
 * @param properties The function configuration properties.
 * @return The poll timeout in milliseconds; {@code DEFAULT_POLL_TIMEOUT}
 *         when the property is not set.
 */
private static int getPollTimeout(Properties properties) {
  Object configured = properties.get(POLL_TIMEOUT_PROPERTY);
  return (configured == null)
          ? DEFAULT_POLL_TIMEOUT
          : ConversionUtils.convert(configured, Integer.class);
}
/**
 * Determines how Kafka messages should be rendered for the user.
 *
 * @param properties The properties.
 * @return How the Kafka messages should be rendered; the simple view when
 *         the property is not set.
 */
private static String getMessageView(Properties properties) {
  if(properties.containsKey(MESSAGE_VIEW_PROPERTY)) {
    return ConversionUtils.convert(properties.get(MESSAGE_VIEW_PROPERTY), String.class);
  }
  // defaults to the simple view
  return MESSAGE_VIEW_SIMPLE;
}
/**
 * Defines the minimal set of default Kafka client parameters.
 *
 * <p>Every value here can be overridden through the global configuration or
 * through a per-call override map; see {@code buildKafkaProperties}.
 */
private static Properties defaultKafkaProperties() {
  Properties properties = new Properties();

  // broker and consumer-group identity
  properties.put("bootstrap.servers", "localhost:9092");
  properties.put("group.id", "kafka-functions-stellar");

  // when no committed offset exists (or it has been purged), resume from the
  // latest message; users may override with 'earliest' or 'none' (the latter
  // makes the consumer throw when no previous offset is found)
  properties.put("auto.offset.reset", "latest");

  // cap the number of messages returned by a single poll request
  properties.put("max.poll.records", 1);

  // messages are read and written as plain strings
  properties.put("key.deserializer", StringDeserializer.class.getName());
  properties.put("value.deserializer", StringDeserializer.class.getName());
  properties.put("key.serializer", StringSerializer.class.getName());
  properties.put("value.serializer", StringSerializer.class.getName());

  // function-level settings: wait budget, poll timeout, and message view
  properties.put(MAX_WAIT_PROPERTY, DEFAULT_MAX_WAIT);
  properties.put(POLL_TIMEOUT_PROPERTY, DEFAULT_POLL_TIMEOUT);
  properties.put(MESSAGE_VIEW_PROPERTY, MESSAGE_VIEW_SIMPLE);
  return properties;
}
}
|
apache/tomcat-maven-plugin | 36,905 | common-tomcat-maven-plugin/src/main/java/org/apache/tomcat/maven/common/deployer/TomcatManager.java | package org.apache.tomcat.maven.common.deployer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.Credentials;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.AuthCache;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.protocol.ClientContext;
import org.apache.http.conn.params.ConnRoutePNames;
import org.apache.http.entity.AbstractHttpEntity;
import org.apache.http.impl.auth.BasicScheme;
import org.apache.http.impl.client.BasicAuthCache;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.PoolingClientConnectionManager;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.maven.settings.Proxy;
import org.apache.maven.wagon.proxy.ProxyInfo;
import org.apache.maven.wagon.proxy.ProxyUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.net.URL;
import java.net.URLEncoder;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.util.Locale;
/**
* A Tomcat manager webapp invocation wrapper.
*
* @author Mark Hobson (markhobson@gmail.com)
*/
public class TomcatManager
{
// ----------------------------------------------------------------------
// Constants
// ----------------------------------------------------------------------
/**
 * The charset to use when decoding Tomcat manager responses.
 */
private static final String MANAGER_CHARSET = "UTF-8";

// ----------------------------------------------------------------------
// Fields
// ----------------------------------------------------------------------

/**
 * The full URL of the Tomcat manager instance to use.
 */
private URL url;

/**
 * The username to use when authenticating with Tomcat manager.
 */
private String username;

/**
 * The password to use when authenticating with Tomcat manager.
 */
private String password;

/**
 * The URL encoding charset to use when communicating with Tomcat manager.
 */
private String charset;

/**
 * The user agent name to use when communicating with Tomcat manager.
 */
private String userAgent;

/**
 * The HTTP client used for all manager requests; created in the constructor.
 *
 * @since 2.0
 */
private DefaultHttpClient httpClient;

/**
 * Per-request context carrying the preemptive basic-auth cache; null when no
 * username was configured (see the constructor).
 *
 * @since 2.0
 */
private BasicHttpContext localContext;

/**
 * The Maven proxy settings to use when communicating with Tomcat manager;
 * null when no proxy is configured.
 */
private Proxy proxy;

/**
 * @since 2.2
 */
private boolean verbose;
// ----------------------------------------------------------------------
// Constructors
// ----------------------------------------------------------------------
/**
 * Creates a Tomcat manager wrapper for the specified URL that uses a username of <code>admin</code>, an empty
 * password and ISO-8859-1 URL encoding.
 *
 * <p>Delegates to {@code TomcatManager(URL, String)}.
 *
 * @param url the full URL of the Tomcat manager instance to use
 */
public TomcatManager( URL url )
{
    this( url, "admin" );
}
/**
 * Creates a Tomcat manager wrapper for the specified URL and username that uses an empty password and ISO-8859-1
 * URL encoding.
 *
 * <p>Delegates to {@code TomcatManager(URL, String, String)}.
 *
 * @param url the full URL of the Tomcat manager instance to use
 * @param username the username to use when authenticating with Tomcat manager
 */
public TomcatManager( URL url, String username )
{
    this( url, username, "" );
}
/**
 * Creates a Tomcat manager wrapper for the specified URL, username and password that uses ISO-8859-1 URL encoding.
 *
 * <p>Delegates to {@code TomcatManager(URL, String, String, String)}.
 *
 * @param url the full URL of the Tomcat manager instance to use
 * @param username the username to use when authenticating with Tomcat manager
 * @param password the password to use when authenticating with Tomcat manager
 */
public TomcatManager( URL url, String username, String password )
{
    this( url, username, password, "ISO-8859-1" );
}
/**
 * Creates a Tomcat manager wrapper for the specified URL, username, password and URL encoding.
 *
 * <p>Delegates to the full constructor with verbose mode enabled.
 *
 * @param url the full URL of the Tomcat manager instance to use
 * @param username the username to use when authenticating with Tomcat manager
 * @param password the password to use when authenticating with Tomcat manager
 * @param charset the URL encoding charset to use when communicating with Tomcat manager
 */
public TomcatManager( URL url, String username, String password, String charset )
{
    this( url, username, password, charset, true );
}
/**
 * Creates a Tomcat manager wrapper for the specified URL, username, password and URL encoding.
 *
 * <p>Builds the HTTP client used for all subsequent manager requests. When a
 * username is supplied, basic-auth credentials are registered and the auth
 * cache is pre-populated so credentials are sent preemptively instead of
 * waiting for a 401 challenge on every request.
 *
 * @param url the full URL of the Tomcat manager instance to use
 * @param username the username to use when authenticating with Tomcat manager
 * @param password the password to use when authenticating with Tomcat manager
 * @param charset the URL encoding charset to use when communicating with Tomcat manager
 * @param verbose if the build is in verbose mode (quiet mode otherwise)
 * @since 2.2
 */
public TomcatManager( URL url, String username, String password, String charset, boolean verbose )
{
    this.url = url;
    this.username = username;
    this.password = password;
    this.charset = charset;
    this.verbose = verbose;

    // a small shared connection pool; manager commands are infrequent
    PoolingClientConnectionManager poolingClientConnectionManager = new PoolingClientConnectionManager();
    poolingClientConnectionManager.setMaxTotal( 5 );
    this.httpClient = new DefaultHttpClient( poolingClientConnectionManager );
    if ( StringUtils.isNotEmpty( username ) )
    {
        Credentials creds = new UsernamePasswordCredentials( username, password );

        String host = url.getHost();
        // when the URL carries no explicit port, match credentials on any port
        int port = url.getPort() > -1 ? url.getPort() : AuthScope.ANY_PORT;

        httpClient.getCredentialsProvider().setCredentials( new AuthScope( host, port ), creds );

        // pre-populate the auth cache to enable preemptive basic authentication
        AuthCache authCache = new BasicAuthCache();
        BasicScheme basicAuth = new BasicScheme();
        HttpHost targetHost = new HttpHost( url.getHost(), url.getPort(), url.getProtocol() );
        authCache.put( targetHost, basicAuth );

        localContext = new BasicHttpContext();
        localContext.setAttribute( ClientContext.AUTH_CACHE, authCache );
    }
}
// ----------------------------------------------------------------------
// Public Methods
// ----------------------------------------------------------------------
/**
 * Returns the full URL of the Tomcat manager instance.
 *
 * @return the manager URL
 */
public URL getURL()
{
    return this.url;
}
/**
* Gets the username to use when authenticating with Tomcat manager.
*
* @return the username to use when authenticating with Tomcat manager
*/
public String getUserName()
{
return username;
}
/**
* Gets the password to use when authenticating with Tomcat manager.
*
* @return the password to use when authenticating with Tomcat manager
*/
public String getPassword()
{
return password;
}
/**
* Gets the URL encoding charset to use when communicating with Tomcat manager.
*
* @return the URL encoding charset to use when communicating with Tomcat manager
*/
public String getCharset()
{
return charset;
}
/**
* Gets the user agent name to use when communicating with Tomcat manager.
*
* @return the user agent name to use when communicating with Tomcat manager
*/
public String getUserAgent()
{
return userAgent;
}
/**
* Sets the user agent name to use when communicating with Tomcat manager.
*
* @param userAgent the user agent name to use when communicating with Tomcat manager
*/
public void setUserAgent( String userAgent )
{
this.userAgent = userAgent;
}
/**
* Sets the proxy to use when communicating with Tomcat manager.
*
* @param proxy the proxy to use when communicating with Tomcat manager
*/
public void setProxy( Proxy proxy )
{
if ( this.proxy != proxy )
{
this.proxy = proxy;
if ( httpClient != null )
{
applyProxy();
}
}
}
    /**
     * {@link #setProxy(Proxy)} is called by {@link AbstractCatalinaMojo#getManager()} after the constructor
     */
    private void applyProxy()
    {
        if ( this.proxy != null )
        {
            ProxyInfo proxyInfo = new ProxyInfo();
            proxyInfo.setNonProxyHosts( this.proxy.getNonProxyHosts() );
            // honour the nonProxyHosts list: only route through the proxy when the manager host is not excluded
            if ( !ProxyUtils.validateNonProxyHosts( proxyInfo, url.getHost() ) )
            {
                HttpHost proxy = new HttpHost( this.proxy.getHost(), this.proxy.getPort(), this.proxy.getProtocol() );
                httpClient.getParams().setParameter( ConnRoutePNames.DEFAULT_PROXY, proxy );
                // register credentials for authenticating proxies
                if ( this.proxy.getUsername() != null )
                {
                    httpClient.getCredentialsProvider().setCredentials(
                        new AuthScope( this.proxy.getHost(), this.proxy.getPort() ),
                        new UsernamePasswordCredentials( this.proxy.getUsername(), this.proxy.getPassword() ) );
                }
            }
        }
        else
        {
            // proxy removed: clear any previously configured default route
            httpClient.getParams().removeParameter( ConnRoutePNames.DEFAULT_PROXY );
        }
    }
    /**
     * Deploys the specified WAR as a URL to the specified context path.
     *
     * @param path the webapp context path to deploy to
     * @param war the URL of the WAR to deploy
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deploy( String path, URL war )
        throws TomcatManagerException, IOException
    {
        return deploy( path, war, false );
    }
    /**
     * Deploys the specified WAR as a URL to the specified context path, optionally undeploying the webapp if it already
     * exists.
     *
     * @param path the webapp context path to deploy to
     * @param war the URL of the WAR to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deploy( String path, URL war, boolean update )
        throws TomcatManagerException, IOException
    {
        return deploy( path, war, update, null );
    }
    /**
     * Deploys the specified WAR as a URL to the specified context path, optionally undeploying the webapp if it already
     * exists and using the specified tag name.
     *
     * @param path the webapp context path to deploy to
     * @param war the URL of the WAR to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @param tag the tag name to use
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deploy( String path, URL war, boolean update, String tag )
        throws TomcatManagerException, IOException
    {
        // server-side deployment: the WAR URL is passed as a query parameter, no body is sent
        return deployImpl( path, null, war, null, update, tag );
    }
    /**
     * Deploys the specified WAR as a HTTP PUT to the specified context path.
     *
     * @param path the webapp context path to deploy to
     * @param war the WAR file to deploy
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deploy( String path, File war )
        throws TomcatManagerException, IOException
    {
        return deploy( path, war, false );
    }
    /**
     * Deploys the specified WAR as a HTTP PUT to the specified context path, optionally undeploying the webapp if it
     * already exists.
     *
     * @param path the webapp context path to deploy to
     * @param war the WAR file to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deploy( String path, File war, boolean update )
        throws TomcatManagerException, IOException
    {
        return deploy( path, war, update, null );
    }
    /**
     * Deploys the specified WAR as a HTTP PUT to the specified context path, optionally undeploying the webapp if it
     * already exists and using the specified tag name.
     *
     * @param path the webapp context path to deploy to
     * @param war the WAR file to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @param tag the tag name to use
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deploy( String path, File war, boolean update, String tag )
        throws TomcatManagerException, IOException
    {
        // client-side deployment: the WAR file is streamed as the request body
        return deployImpl( path, null, null, war, update, tag );
    }
    /**
     * Deploys the specified WAR as a HTTP PUT to the specified context path, optionally undeploying the webapp if it
     * already exists and using the specified tag name.
     *
     * @param path the webapp context path to deploy to
     * @param war the WAR file to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @param tag the tag name to use
     * @param length the size of the war deployed
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     * @since 2.0
     */
    public TomcatManagerResponse deploy( String path, File war, boolean update, String tag, long length )
        throws TomcatManagerException, IOException
    {
        return deployImpl( path, null, null, war, update, tag, length );
    }
    /**
     * Deploys the specified context XML configuration to the specified context path.
     *
     * @param path the webapp context path to deploy to
     * @param config the URL of the context XML configuration to deploy
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deployContext( String path, URL config )
        throws TomcatManagerException, IOException
    {
        return deployContext( path, config, false );
    }
    /**
     * Deploys the specified context XML configuration to the specified context path, optionally undeploying the webapp
     * if it already exists.
     *
     * @param path the webapp context path to deploy to
     * @param config the URL of the context XML configuration to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deployContext( String path, URL config, boolean update )
        throws TomcatManagerException, IOException
    {
        return deployContext( path, config, update, null );
    }
    /**
     * Deploys the specified context XML configuration to the specified context path, optionally undeploying the webapp
     * if it already exists and using the specified tag name.
     *
     * @param path the webapp context path to deploy to
     * @param config the URL of the context XML configuration to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @param tag the tag name to use
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deployContext( String path, URL config, boolean update, String tag )
        throws TomcatManagerException, IOException
    {
        // context descriptor only — no WAR is supplied
        return deployContext( path, config, null, update, tag );
    }
    /**
     * Deploys the specified context XML configuration and WAR as a URL to the specified context path.
     *
     * @param path the webapp context path to deploy to
     * @param config the URL of the context XML configuration to deploy
     * @param war the URL of the WAR to deploy
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deployContext( String path, URL config, URL war )
        throws TomcatManagerException, IOException
    {
        return deployContext( path, config, war, false );
    }
    /**
     * Deploys the specified context XML configuration and WAR as a URL to the specified context path, optionally
     * undeploying the webapp if it already exists.
     *
     * @param path the webapp context path to deploy to
     * @param config the URL of the context XML configuration to deploy
     * @param war the URL of the WAR to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deployContext( String path, URL config, URL war, boolean update )
        throws TomcatManagerException, IOException
    {
        return deployContext( path, config, war, update, null );
    }
    /**
     * Deploys the specified context XML configuration and WAR as a URL to the specified context path, optionally
     * undeploying the webapp if it already exists and using the specified tag name.
     *
     * @param path the webapp context path to deploy to
     * @param config the URL of the context XML configuration to deploy
     * @param war the URL of the WAR to deploy
     * @param update whether to first undeploy the webapp if it already exists
     * @param tag the tag name to use
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse deployContext( String path, URL config, URL war, boolean update, String tag )
        throws TomcatManagerException, IOException
    {
        // terminus of the deployContext overload chain: no file body is streamed
        return deployImpl( path, config, war, null, update, tag );
    }
    /**
     * Undeploys the webapp at the specified context path.
     *
     * @param path the webapp context path to undeploy
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse undeploy( String path )
        throws TomcatManagerException, IOException
    {
        // lifecycle commands below are simple GETs with the context path URL-encoded
        return invoke( "/undeploy?path=" + URLEncoder.encode( path, charset ) );
    }
    /**
     * Reloads the webapp at the specified context path.
     *
     * @param path the webapp context path to reload
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse reload( String path )
        throws TomcatManagerException, IOException
    {
        return invoke( "/reload?path=" + URLEncoder.encode( path, charset ) );
    }
    /**
     * Starts the webapp at the specified context path.
     *
     * @param path the webapp context path to start
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse start( String path )
        throws TomcatManagerException, IOException
    {
        return invoke( "/start?path=" + URLEncoder.encode( path, charset ) );
    }
    /**
     * Stops the webapp at the specified context path.
     *
     * @param path the webapp context path to stop
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse stop( String path )
        throws TomcatManagerException, IOException
    {
        return invoke( "/stop?path=" + URLEncoder.encode( path, charset ) );
    }
    /**
     * Lists all the currently deployed web applications.
     *
     * @return the list of currently deployed applications
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse list()
        throws TomcatManagerException, IOException
    {
        return invoke( "/list" );
    }
    /**
     * Lists information about the Tomcat version, OS, and JVM properties.
     *
     * @return the server information
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse getServerInfo()
        throws TomcatManagerException, IOException
    {
        return invoke( "/serverinfo" );
    }
    /**
     * Lists all of the global JNDI resources.
     *
     * @return the list of all global JNDI resources
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse getResources()
        throws TomcatManagerException, IOException
    {
        // null type means "all resource types"
        return getResources( null );
    }
/**
* Lists the global JNDI resources of the given type.
*
* @param type the class name of the resources to list, or <code>null</code> for all
* @return the list of global JNDI resources of the given type
* @throws TomcatManagerException if the Tomcat manager request fails
* @throws IOException if an i/o error occurs
*/
public TomcatManagerResponse getResources( String type )
throws TomcatManagerException, IOException
{
StringBuffer buffer = new StringBuffer();
buffer.append( "/resources" );
if ( type != null )
{
buffer.append( "?type=" + URLEncoder.encode( type, charset ) );
}
return invoke( buffer.toString() );
}
    /**
     * Lists the security role names and corresponding descriptions that are available.
     *
     * @return the list of security role names and corresponding descriptions
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse getRoles()
        throws TomcatManagerException, IOException
    {
        return invoke( "/roles" );
    }
    /**
     * Lists the default session timeout and the number of currently active sessions for the given context path.
     *
     * @param path the context path to list session information for
     * @return the default session timeout and the number of currently active sessions
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    public TomcatManagerResponse getSessions( String path )
        throws TomcatManagerException, IOException
    {
        return invoke( "/sessions?path=" + URLEncoder.encode( path, charset ) );
    }
    // ----------------------------------------------------------------------
    // Protected Methods
    // ----------------------------------------------------------------------
    /**
     * Invokes Tomcat manager with the specified command.
     *
     * @param path the Tomcat manager command to invoke
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    protected TomcatManagerResponse invoke( String path )
        throws TomcatManagerException, IOException
    {
        // no request body (null) and unknown length (-1) for plain manager commands
        return invoke( path, null, -1 );
    }
    // ----------------------------------------------------------------------
    // Private Methods
    // ----------------------------------------------------------------------
    /**
     * Deploys with an unknown content length (-1), delegating to the full implementation.
     */
    private TomcatManagerResponse deployImpl( String path, URL config, URL war, File data, boolean update, String tag )
        throws TomcatManagerException, IOException
    {
        return deployImpl( path, config, war, data, update, tag, -1 );
    }
    /**
     * Deploys the specified WAR.
     *
     * @param path the webapp context path to deploy to
     * @param config the URL of the context XML configuration to deploy, or null for none
     * @param war the URL of the WAR to deploy, or null to use <code>data</code>
     * @param data WAR file to deploy, or null to use <code>war</code>
     * @param update whether to first undeploy the webapp if it already exists
     * @param tag the tag name to use
     * @param length the size of the war deployed, or -1 if unknown
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    private TomcatManagerResponse deployImpl( String path, URL config, URL war, File data, boolean update, String tag,
                                              long length )
        throws TomcatManagerException, IOException
    {
        // build /deploy with each optional argument appended URL-encoded
        StringBuilder buffer = new StringBuilder( "/deploy" );
        buffer.append( "?path=" ).append( URLEncoder.encode( path, charset ) );
        if ( config != null )
        {
            buffer.append( "&config=" ).append( URLEncoder.encode( config.toString(), charset ) );
        }
        if ( war != null )
        {
            buffer.append( "&war=" ).append( URLEncoder.encode( war.toString(), charset ) );
        }
        if ( update )
        {
            buffer.append( "&update=true" );
        }
        if ( tag != null )
        {
            buffer.append( "&tag=" ).append( URLEncoder.encode( tag, charset ) );
        }
        return invoke( buffer.toString(), data, length );
    }
    /**
     * Invokes Tomcat manager with the specified command and content data.
     *
     * @param path the Tomcat manager command to invoke
     * @param data file to deploy
     * @param length the size of the deployed file, or -1 if unknown
     * @return the Tomcat manager response
     * @throws TomcatManagerException if the Tomcat manager request fails
     * @throws IOException if an i/o error occurs
     */
    protected TomcatManagerResponse invoke( String path, File data, long length )
        throws TomcatManagerException, IOException
    {
        HttpRequestBase httpRequestBase = null;
        if ( data == null )
        {
            // plain manager command: simple GET with no payload
            httpRequestBase = new HttpGet( url + path );
        }
        else
        {
            // WAR upload: stream the file as the body of an HTTP PUT
            HttpPut httpPut = new HttpPut( url + path );
            httpPut.setEntity( new RequestEntityImplementation( data, length, url + path, verbose ) );
            httpRequestBase = httpPut;
        }
        if ( userAgent != null )
        {
            httpRequestBase.setHeader( "User-Agent", userAgent );
        }
        HttpResponse response = httpClient.execute( httpRequestBase, localContext );
        int statusCode = response.getStatusLine().getStatusCode();
        switch ( statusCode )
        {
            // Success Codes
            case HttpStatus.SC_OK: // 200
            case HttpStatus.SC_CREATED: // 201
            case HttpStatus.SC_ACCEPTED: // 202
                break;
            // handle all redirect even if http specs says " the user agent MUST NOT automatically redirect the request unless it can be confirmed by the user"
            case HttpStatus.SC_MOVED_PERMANENTLY: // 301
            case HttpStatus.SC_MOVED_TEMPORARILY: // 302
            case HttpStatus.SC_SEE_OTHER: // 303
                // NOTE(review): the manager URL is mutated and the request retried recursively;
                // there is no guard against a redirect loop — confirm this is acceptable
                String relocateUrl = calculateRelocatedUrl( response );
                this.url = new URL( relocateUrl );
                return invoke( path, data, length );
        }
        // NOTE(review): IOUtils.toString without an explicit charset decodes the body using the
        // platform default encoding — verify against the manager's actual response encoding
        return new TomcatManagerResponse().setStatusCode( response.getStatusLine().getStatusCode() ).setReasonPhrase(
            response.getStatusLine().getReasonPhrase() ).setHttpResponseBody(
            IOUtils.toString( response.getEntity().getContent() ) );
    }
protected String calculateRelocatedUrl( HttpResponse response )
{
Header locationHeader = response.getFirstHeader( "Location" );
String locationField = locationHeader.getValue();
// is it a relative Location or a full ?
return locationField.startsWith( "http" ) ? locationField : url.toString() + '/' + locationField;
}
/**
* Gets the HTTP Basic Authorization header value for the supplied username and password.
*
* @param username the username to use for authentication
* @param password the password to use for authentication
* @return the HTTP Basic Authorization header value
*/
private String toAuthorization( String username, String password )
{
StringBuffer buffer = new StringBuffer();
buffer.append( username ).append( ':' );
if ( password != null )
{
buffer.append( password );
}
return "Basic " + new String( Base64.encodeBase64( buffer.toString().getBytes() ) );
}
private final class RequestEntityImplementation
extends AbstractHttpEntity
{
private final static int BUFFER_SIZE = 2048;
private File file;
PrintStream out = System.out;
private long length = -1;
private int lastLength;
private String url;
private long startTime;
private boolean verbose;
private RequestEntityImplementation( final File file, long length, String url, boolean verbose )
{
this.file = file;
this.length = length;
this.url = url;
this.verbose = verbose;
}
public long getContentLength()
{
return length >= 0 ? length : ( file.length() >= 0 ? file.length() : -1 );
}
public InputStream getContent()
throws IOException, IllegalStateException
{
return new FileInputStream( this.file );
}
public boolean isRepeatable()
{
return true;
}
public void writeTo( final OutputStream outstream )
throws IOException
{
long completed = 0;
if ( outstream == null )
{
throw new IllegalArgumentException( "Output stream may not be null" );
}
FileInputStream stream = new FileInputStream( this.file );
transferInitiated( this.url );
this.startTime = System.currentTimeMillis();
try
{
byte[] buffer = new byte[BUFFER_SIZE];
int l;
if ( this.length < 0 )
{
// until EOF
while ( ( l = stream.read( buffer ) ) != -1 )
{
transferProgressed( completed += buffer.length, -1 );
outstream.write( buffer, 0, l );
}
}
else
{
// no need to consume more than length
long remaining = this.length;
while ( remaining > 0 )
{
int transferSize = (int) Math.min( BUFFER_SIZE, remaining );
completed += transferSize;
l = stream.read( buffer, 0, transferSize );
if ( l == -1 )
{
break;
}
outstream.write( buffer, 0, l );
remaining -= l;
transferProgressed( completed, this.length );
}
}
transferSucceeded( completed );
}
finally
{
stream.close();
out.println();
}
// end transfer
}
public boolean isStreaming()
{
return true;
}
public void transferInitiated( String url )
{
String message = "Uploading";
out.println( message + ": " + url );
}
public void transferProgressed( long completedSize, long totalSize )
{
if ( !verbose )
{
return;
}
StringBuilder buffer = new StringBuilder( 64 );
buffer.append( getStatus( completedSize, totalSize ) ).append( " " );
lastLength = buffer.length();
buffer.append( '\r' );
out.print( buffer );
}
public void transferSucceeded( long contentLength )
{
if ( contentLength >= 0 )
{
String type = "Uploaded";
String len = contentLength >= 1024 ? toKB( contentLength ) + " KB" : contentLength + " B";
String throughput = "";
long duration = System.currentTimeMillis() - startTime;
if ( duration > 0 )
{
DecimalFormat format = new DecimalFormat( "0.0", new DecimalFormatSymbols( Locale.ENGLISH ) );
double kbPerSec = ( contentLength / 1024.0 ) / ( duration / 1000.0 );
throughput = " at " + format.format( kbPerSec ) + " KB/sec";
}
out.println( type + ": " + url + " (" + len + throughput + ")" );
}
}
private String getStatus( long complete, long total )
{
if ( total >= 1024 )
{
return toKB( complete ) + "/" + toKB( total ) + " KB ";
}
else if ( total >= 0 )
{
return complete + "/" + total + " B ";
}
else if ( complete >= 1024 )
{
return toKB( complete ) + " KB ";
}
else
{
return complete + " B ";
}
}
private long toKB( long bytes )
{
return ( bytes + 1023 ) / 1024;
}
}
}
|
googleapis/google-cloud-java | 36,859 | java-talent/proto-google-cloud-talent-v4beta1/src/main/java/com/google/cloud/talent/v4beta1/HistogramQueryResult.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/talent/v4beta1/histogram.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.talent.v4beta1;
/**
*
*
* <pre>
* Histogram result that matches
* [HistogramQuery][google.cloud.talent.v4beta1.HistogramQuery] specified in
* searches.
* </pre>
*
* Protobuf type {@code google.cloud.talent.v4beta1.HistogramQueryResult}
*/
public final class HistogramQueryResult extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.talent.v4beta1.HistogramQueryResult)
HistogramQueryResultOrBuilder {
private static final long serialVersionUID = 0L;
  // Use HistogramQueryResult.newBuilder() to construct.
  private HistogramQueryResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: histogram_query defaults to the empty string.
  private HistogramQueryResult() {
    histogramQuery_ = "";
  }
  // Invoked reflectively by the protobuf runtime when creating new instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new HistogramQueryResult();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.talent.v4beta1.HistogramProto
        .internal_static_google_cloud_talent_v4beta1_HistogramQueryResult_descriptor;
  }
  // Maps field numbers to reflective map accessors; field 2 is the histogram map.
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 2:
        return internalGetHistogram();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.talent.v4beta1.HistogramProto
        .internal_static_google_cloud_talent_v4beta1_HistogramQueryResult_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.talent.v4beta1.HistogramQueryResult.class,
            com.google.cloud.talent.v4beta1.HistogramQueryResult.Builder.class);
  }
  public static final int HISTOGRAM_QUERY_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private volatile java.lang.Object histogramQuery_ = "";
  /**
   *
   *
   * <pre>
   * Requested histogram expression.
   * </pre>
   *
   * <code>string histogram_query = 1;</code>
   *
   * @return The histogramQuery.
   */
  @java.lang.Override
  public java.lang.String getHistogramQuery() {
    java.lang.Object ref = histogramQuery_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // field may still hold the wire-format ByteString; decode once and cache the String
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      histogramQuery_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Requested histogram expression.
   * </pre>
   *
   * <code>string histogram_query = 1;</code>
   *
   * @return The bytes for histogramQuery.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getHistogramQueryBytes() {
    java.lang.Object ref = histogramQuery_;
    if (ref instanceof java.lang.String) {
      // symmetric caching: replace the String with its UTF-8 ByteString form
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      histogramQuery_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int HISTOGRAM_FIELD_NUMBER = 2;
  // Lazily-initialized holder for the map entry prototype (string key, int64 value).
  private static final class HistogramDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.Long>newDefaultInstance(
            com.google.cloud.talent.v4beta1.HistogramProto
                .internal_static_google_cloud_talent_v4beta1_HistogramQueryResult_HistogramEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.INT64,
            0L);
  }
  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.Long> histogram_;
  // Returns the backing MapField, substituting a shared empty map when unset.
  private com.google.protobuf.MapField<java.lang.String, java.lang.Long> internalGetHistogram() {
    if (histogram_ == null) {
      return com.google.protobuf.MapField.emptyMapField(HistogramDefaultEntryHolder.defaultEntry);
    }
    return histogram_;
  }
  // Number of entries in the histogram map.
  public int getHistogramCount() {
    return internalGetHistogram().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * A map from the values of the facet associated with distinct values to the
   * number of matching entries with corresponding value.
   *
   * The key format is:
   *
   * * (for string histogram) string values stored in the field.
   * * (for named numeric bucket) name specified in `bucket()` function, like
   *   for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
   * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for
   *   example, `0-1000`, `MIN-0`, and `0-MAX`.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; histogram = 2;</code>
   */
  @java.lang.Override
  public boolean containsHistogram(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetHistogram().getMap().containsKey(key);
  }
  /** Use {@link #getHistogramMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.Long> getHistogram() {
    return getHistogramMap();
  }
  /**
   *
   *
   * <pre>
   * A map from the values of the facet associated with distinct values to the
   * number of matching entries with corresponding value.
   *
   * The key format is:
   *
   * * (for string histogram) string values stored in the field.
   * * (for named numeric bucket) name specified in `bucket()` function, like
   *   for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
   * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for
   *   example, `0-1000`, `MIN-0`, and `0-MAX`.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; histogram = 2;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.Long> getHistogramMap() {
    return internalGetHistogram().getMap();
  }
  /**
   *
   *
   * <pre>
   * A map from the values of the facet associated with distinct values to the
   * number of matching entries with corresponding value.
   *
   * The key format is:
   *
   * * (for string histogram) string values stored in the field.
   * * (for named numeric bucket) name specified in `bucket()` function, like
   *   for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
   * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for
   *   example, `0-1000`, `MIN-0`, and `0-MAX`.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; histogram = 2;</code>
   */
  @java.lang.Override
  public long getHistogramOrDefault(java.lang.String key, long defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * A map from the values of the facet associated with distinct values to the
   * number of matching entries with corresponding value.
   *
   * The key format is:
   *
   * * (for string histogram) string values stored in the field.
   * * (for named numeric bucket) name specified in `bucket()` function, like
   *   for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
   * * (for anonymous numeric bucket) range formatted as `&lt;low&gt;-&lt;high&gt;`, for
   *   example, `0-1000`, `MIN-0`, and `0-MAX`.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; histogram = 2;</code>
   */
  @java.lang.Override
  public long getHistogramOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(histogramQuery_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, histogramQuery_);
}
com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
output, internalGetHistogram(), HistogramDefaultEntryHolder.defaultEntry, 2);
getUnknownFields().writeTo(output);
}
  // Computes the serialized byte size of this message; the result is memoized in
  // memoizedSize (-1 means "not yet computed") so repeated calls are cheap.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(histogramQuery_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, histogramQuery_);
    }
    // Each map entry is sized as a nested MapEntry message tagged with field number 2.
    for (java.util.Map.Entry<java.lang.String, java.lang.Long> entry :
        internalGetHistogram().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> histogram__ =
          HistogramDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, histogram__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: same histogram_query, same histogram map, same unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.talent.v4beta1.HistogramQueryResult)) {
      return super.equals(obj);
    }
    com.google.cloud.talent.v4beta1.HistogramQueryResult other =
        (com.google.cloud.talent.v4beta1.HistogramQueryResult) obj;
    if (!getHistogramQuery().equals(other.getHistogramQuery())) return false;
    if (!internalGetHistogram().equals(other.internalGetHistogram())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixed from the descriptor plus each populated field, keyed by its field number;
  // memoized in memoizedHashCode (0 acts as the "not yet computed" sentinel).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + HISTOGRAM_QUERY_FIELD_NUMBER;
    hash = (53 * hash) + getHistogramQuery().hashCode();
    // The map only contributes when non-empty, matching equals() on empty-vs-default maps.
    if (!internalGetHistogram().getMap().isEmpty()) {
      hash = (37 * hash) + HISTOGRAM_FIELD_NUMBER;
      hash = (53 * hash) + internalGetHistogram().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Generated static parse entry points. Each overload decodes a HistogramQueryResult from a
  // different source (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), with and
  // without an ExtensionRegistryLite; the *Delimited variants read a length-prefixed message.
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder() starts from the default instance; newBuilder(prototype)
  // starts pre-populated; toBuilder() avoids a redundant merge for the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.talent.v4beta1.HistogramQueryResult prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Histogram result that matches
   * [HistogramQuery][google.cloud.talent.v4beta1.HistogramQuery] specified in
   * searches.
   * </pre>
   *
   * Protobuf type {@code google.cloud.talent.v4beta1.HistogramQueryResult}
   */
  // Generated mutable builder for HistogramQueryResult. Presence of the two fields is tracked
  // in bitField0_: bit 0x1 = histogram_query, bit 0x2 = histogram map.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4beta1.HistogramQueryResult)
      com.google.cloud.talent.v4beta1.HistogramQueryResultOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.talent.v4beta1.HistogramProto
          .internal_static_google_cloud_talent_v4beta1_HistogramQueryResult_descriptor;
    }
    // Reflection hook: maps proto field number 2 to the histogram map field.
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
        int number) {
      switch (number) {
        case 2:
          return internalGetHistogram();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    // Mutable counterpart of internalGetMapFieldReflection, used by reflective setters.
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
        int number) {
      switch (number) {
        case 2:
          return internalGetMutableHistogram();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.talent.v4beta1.HistogramProto
          .internal_static_google_cloud_talent_v4beta1_HistogramQueryResult_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.talent.v4beta1.HistogramQueryResult.class,
              com.google.cloud.talent.v4beta1.HistogramQueryResult.Builder.class);
    }
    // Construct using com.google.cloud.talent.v4beta1.HistogramQueryResult.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both fields and all presence bits to their defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      histogramQuery_ = "";
      internalGetMutableHistogram().clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.talent.v4beta1.HistogramProto
          .internal_static_google_cloud_talent_v4beta1_HistogramQueryResult_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.talent.v4beta1.HistogramQueryResult getDefaultInstanceForType() {
      return com.google.cloud.talent.v4beta1.HistogramQueryResult.getDefaultInstance();
    }
    // build() enforces isInitialized(); always true here since there are no required fields.
    @java.lang.Override
    public com.google.cloud.talent.v4beta1.HistogramQueryResult build() {
      com.google.cloud.talent.v4beta1.HistogramQueryResult result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.talent.v4beta1.HistogramQueryResult buildPartial() {
      com.google.cloud.talent.v4beta1.HistogramQueryResult result =
          new com.google.cloud.talent.v4beta1.HistogramQueryResult(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose presence bit is set into the freshly built message;
    // the map is frozen so the built message stays immutable.
    private void buildPartial0(com.google.cloud.talent.v4beta1.HistogramQueryResult result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.histogramQuery_ = histogramQuery_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.histogram_ = internalGetHistogram();
        result.histogram_.makeImmutable();
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic dispatch: use the typed merge when possible, else the generic reflective merge.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.talent.v4beta1.HistogramQueryResult) {
        return mergeFrom((com.google.cloud.talent.v4beta1.HistogramQueryResult) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: non-empty scalar wins, map entries are merged key-by-key.
    public Builder mergeFrom(com.google.cloud.talent.v4beta1.HistogramQueryResult other) {
      if (other == com.google.cloud.talent.v4beta1.HistogramQueryResult.getDefaultInstance())
        return this;
      if (!other.getHistogramQuery().isEmpty()) {
        histogramQuery_ = other.histogramQuery_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      internalGetMutableHistogram().mergeFrom(other.internalGetHistogram());
      bitField0_ |= 0x00000002;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Streaming merge: reads tag/value pairs until EOF (tag 0) or an end-group tag.
    // Tag 10 = field 1 (histogram_query, length-delimited); tag 18 = field 2 (map entry).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                histogramQuery_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> histogram__ =
                    input.readMessage(
                        HistogramDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableHistogram()
                    .getMutableMap()
                    .put(histogram__.getKey(), histogram__.getValue());
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    // histogram_query: stored as String or lazily-decoded ByteString, like the message field.
    private java.lang.Object histogramQuery_ = "";
    /**
     *
     *
     * <pre>
     * Requested histogram expression.
     * </pre>
     *
     * <code>string histogram_query = 1;</code>
     *
     * @return The histogramQuery.
     */
    public java.lang.String getHistogramQuery() {
      java.lang.Object ref = histogramQuery_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        histogramQuery_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Requested histogram expression.
     * </pre>
     *
     * <code>string histogram_query = 1;</code>
     *
     * @return The bytes for histogramQuery.
     */
    public com.google.protobuf.ByteString getHistogramQueryBytes() {
      java.lang.Object ref = histogramQuery_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        histogramQuery_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Requested histogram expression.
     * </pre>
     *
     * <code>string histogram_query = 1;</code>
     *
     * @param value The histogramQuery to set.
     * @return This builder for chaining.
     */
    public Builder setHistogramQuery(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      histogramQuery_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requested histogram expression.
     * </pre>
     *
     * <code>string histogram_query = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearHistogramQuery() {
      histogramQuery_ = getDefaultInstance().getHistogramQuery();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Requested histogram expression.
     * </pre>
     *
     * <code>string histogram_query = 1;</code>
     *
     * @param value The bytes for histogramQuery to set.
     * @return This builder for chaining.
     */
    public Builder setHistogramQueryBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      histogramQuery_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long> histogram_;
    // Read-only view of the map; returns the shared empty MapField while unset.
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long> internalGetHistogram() {
      if (histogram_ == null) {
        return com.google.protobuf.MapField.emptyMapField(HistogramDefaultEntryHolder.defaultEntry);
      }
      return histogram_;
    }
    // Mutable access with lazy allocation and copy-on-write if the field was frozen by a build.
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long>
        internalGetMutableHistogram() {
      if (histogram_ == null) {
        histogram_ =
            com.google.protobuf.MapField.newMapField(HistogramDefaultEntryHolder.defaultEntry);
      }
      if (!histogram_.isMutable()) {
        histogram_ = histogram_.copy();
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return histogram_;
    }
    public int getHistogramCount() {
      return internalGetHistogram().getMap().size();
    }
    /**
     *
     *
     * <pre>
     * A map from the values of the facet associated with distinct values to the
     * number of matching entries with corresponding value.
     *
     * The key format is:
     *
     * * (for string histogram) string values stored in the field.
     * * (for named numeric bucket) name specified in `bucket()` function, like
     * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
     * * (for anonymous numeric bucket) range formatted as `<low>-<high>`, for
     * example, `0-1000`, `MIN-0`, and `0-MAX`.
     * </pre>
     *
     * <code>map<string, int64> histogram = 2;</code>
     */
    @java.lang.Override
    public boolean containsHistogram(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetHistogram().getMap().containsKey(key);
    }
    /** Use {@link #getHistogramMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.Long> getHistogram() {
      return getHistogramMap();
    }
    /**
     *
     *
     * <pre>
     * A map from the values of the facet associated with distinct values to the
     * number of matching entries with corresponding value.
     *
     * The key format is:
     *
     * * (for string histogram) string values stored in the field.
     * * (for named numeric bucket) name specified in `bucket()` function, like
     * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
     * * (for anonymous numeric bucket) range formatted as `<low>-<high>`, for
     * example, `0-1000`, `MIN-0`, and `0-MAX`.
     * </pre>
     *
     * <code>map<string, int64> histogram = 2;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.Long> getHistogramMap() {
      return internalGetHistogram().getMap();
    }
    /**
     *
     *
     * <pre>
     * A map from the values of the facet associated with distinct values to the
     * number of matching entries with corresponding value.
     *
     * The key format is:
     *
     * * (for string histogram) string values stored in the field.
     * * (for named numeric bucket) name specified in `bucket()` function, like
     * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
     * * (for anonymous numeric bucket) range formatted as `<low>-<high>`, for
     * example, `0-1000`, `MIN-0`, and `0-MAX`.
     * </pre>
     *
     * <code>map<string, int64> histogram = 2;</code>
     */
    @java.lang.Override
    public long getHistogramOrDefault(java.lang.String key, long defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
    /**
     *
     *
     * <pre>
     * A map from the values of the facet associated with distinct values to the
     * number of matching entries with corresponding value.
     *
     * The key format is:
     *
     * * (for string histogram) string values stored in the field.
     * * (for named numeric bucket) name specified in `bucket()` function, like
     * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
     * * (for anonymous numeric bucket) range formatted as `<low>-<high>`, for
     * example, `0-1000`, `MIN-0`, and `0-MAX`.
     * </pre>
     *
     * <code>map<string, int64> histogram = 2;</code>
     */
    @java.lang.Override
    public long getHistogramOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.Long> map = internalGetHistogram().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }
    public Builder clearHistogram() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableHistogram().getMutableMap().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A map from the values of the facet associated with distinct values to the
     * number of matching entries with corresponding value.
     *
     * The key format is:
     *
     * * (for string histogram) string values stored in the field.
     * * (for named numeric bucket) name specified in `bucket()` function, like
     * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
     * * (for anonymous numeric bucket) range formatted as `<low>-<high>`, for
     * example, `0-1000`, `MIN-0`, and `0-MAX`.
     * </pre>
     *
     * <code>map<string, int64> histogram = 2;</code>
     */
    public Builder removeHistogram(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableHistogram().getMutableMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.Long> getMutableHistogram() {
      bitField0_ |= 0x00000002;
      return internalGetMutableHistogram().getMutableMap();
    }
    /**
     *
     *
     * <pre>
     * A map from the values of the facet associated with distinct values to the
     * number of matching entries with corresponding value.
     *
     * The key format is:
     *
     * * (for string histogram) string values stored in the field.
     * * (for named numeric bucket) name specified in `bucket()` function, like
     * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
     * * (for anonymous numeric bucket) range formatted as `<low>-<high>`, for
     * example, `0-1000`, `MIN-0`, and `0-MAX`.
     * </pre>
     *
     * <code>map<string, int64> histogram = 2;</code>
     */
    public Builder putHistogram(java.lang.String key, long value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableHistogram().getMutableMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     *
     *
     * <pre>
     * A map from the values of the facet associated with distinct values to the
     * number of matching entries with corresponding value.
     *
     * The key format is:
     *
     * * (for string histogram) string values stored in the field.
     * * (for named numeric bucket) name specified in `bucket()` function, like
     * for `bucket(0, MAX, "non-negative")`, the key will be `non-negative`.
     * * (for anonymous numeric bucket) range formatted as `<low>-<high>`, for
     * example, `0-1000`, `MIN-0`, and `0-MAX`.
     * </pre>
     *
     * <code>map<string, int64> histogram = 2;</code>
     */
    public Builder putAllHistogram(java.util.Map<java.lang.String, java.lang.Long> values) {
      internalGetMutableHistogram().getMutableMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4beta1.HistogramQueryResult)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.HistogramQueryResult)
  // Singleton default instance: all fields at their proto3 defaults, shared by all callers.
  private static final com.google.cloud.talent.v4beta1.HistogramQueryResult DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.talent.v4beta1.HistogramQueryResult();
  }
  public static com.google.cloud.talent.v4beta1.HistogramQueryResult getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; parse failures carry the partially built message for diagnostics.
  private static final com.google.protobuf.Parser<HistogramQueryResult> PARSER =
      new com.google.protobuf.AbstractParser<HistogramQueryResult>() {
        @java.lang.Override
        public HistogramQueryResult parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<HistogramQueryResult> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<HistogramQueryResult> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.talent.v4beta1.HistogramQueryResult getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/paimon | 36,933 | paimon-common/src/test/java/org/apache/paimon/casting/CastExecutorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.paimon.casting;
import org.apache.paimon.data.BinaryString;
import org.apache.paimon.data.GenericArray;
import org.apache.paimon.data.GenericMap;
import org.apache.paimon.data.GenericRow;
import org.apache.paimon.data.InternalArray;
import org.apache.paimon.data.InternalMap;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.data.Timestamp;
import org.apache.paimon.types.ArrayType;
import org.apache.paimon.types.BigIntType;
import org.apache.paimon.types.BinaryType;
import org.apache.paimon.types.BooleanType;
import org.apache.paimon.types.CharType;
import org.apache.paimon.types.DataTypes;
import org.apache.paimon.types.DateType;
import org.apache.paimon.types.DecimalType;
import org.apache.paimon.types.DoubleType;
import org.apache.paimon.types.FloatType;
import org.apache.paimon.types.IntType;
import org.apache.paimon.types.LocalZonedTimestampType;
import org.apache.paimon.types.MapType;
import org.apache.paimon.types.RowType;
import org.apache.paimon.types.SmallIntType;
import org.apache.paimon.types.TimeType;
import org.apache.paimon.types.TimestampType;
import org.apache.paimon.types.TinyIntType;
import org.apache.paimon.types.VarBinaryType;
import org.apache.paimon.types.VarCharType;
import org.apache.paimon.utils.DateTimeUtils;
import org.apache.paimon.utils.DecimalUtils;
import org.junit.jupiter.api.Test;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.assertj.core.api.Assertions.fail;
/** Test for {@link CastExecutor}. */
public class CastExecutorTest {
    // Verifies casts between the numeric primitive types (tinyint/smallint/int/bigint/
    // float/double), mostly widening plus one smallint -> tinyint narrowing case.
    @Test
    public void testNumericToNumeric() {
        // byte to other numeric
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new SmallIntType(false)),
                (byte) 1,
                (short) 1);
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new IntType(false)), (byte) 1, 1);
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new BigIntType(false)), (byte) 1, 1L);
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new FloatType(false)), (byte) 1, 1F);
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new DoubleType(false)), (byte) 1, 1D);
        // short to other numeric (123 still fits in a byte, so the narrowing is lossless)
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new TinyIntType(false)),
                (short) 123,
                (byte) 123);
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new IntType(false)), (short) 1, 1);
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new BigIntType(false)),
                (short) 1,
                1L);
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new FloatType(false)),
                (short) 1,
                1F);
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new DoubleType(false)),
                (short) 1,
                1D);
        // int to other numeric
        compareCastResult(CastExecutors.resolve(new IntType(false), new BigIntType(false)), 1, 1L);
        compareCastResult(CastExecutors.resolve(new IntType(false), new FloatType(false)), 1, 1F);
        compareCastResult(CastExecutors.resolve(new IntType(false), new DoubleType(false)), 1, 1D);
        // bigint to other numeric
        compareCastResult(
                CastExecutors.resolve(new BigIntType(false), new FloatType(false)), 1L, 1F);
        compareCastResult(
                CastExecutors.resolve(new BigIntType(false), new DoubleType(false)), 1L, 1D);
        // float to double
        compareCastResult(
                CastExecutors.resolve(new FloatType(false), new DoubleType(false)), 1F, 1D);
    }
    // Epoch seconds (bigint) to TIMESTAMP and TIMESTAMP WITH LOCAL TIME ZONE; the
    // local-zone expectation is derived via the default TimeZone so the test is zone-agnostic.
    @Test
    public void testNumericToTimestamp() {
        // NOTE(review): this literal is an int while the sibling call below uses 1721898748L —
        // confirm compareCastResult/the resolved executor accepts a boxed Integer here.
        compareCastResult(
                CastExecutors.resolve(new BigIntType(false), new TimestampType(3)),
                1721898748,
                DateTimeUtils.parseTimestampData("2024-07-25 09:12:28.000", 3));
        Timestamp timestamp = Timestamp.fromEpochMillis(1721898748000L);
        String tsString = DateTimeUtils.formatTimestamp(timestamp, TimeZone.getDefault(), 3);
        Timestamp timestamp1 = DateTimeUtils.parseTimestampData(tsString, 3);
        compareCastResult(
                CastExecutors.resolve(new BigIntType(false), new LocalZonedTimestampType(3)),
                1721898748L,
                timestamp1);
    }
    // Each numeric primitive cast to DECIMAL(10, 2); fractional inputs are expected to be
    // reduced to two decimal places via DecimalUtils.castFrom.
    @Test
    public void testNumericToDecimal() {
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new DecimalType(10, 2)),
                (byte) 1,
                DecimalUtils.castFrom(1, 10, 2));
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new DecimalType(10, 2)),
                (short) 1,
                DecimalUtils.castFrom(1, 10, 2));
        compareCastResult(
                CastExecutors.resolve(new IntType(false), new DecimalType(10, 2)),
                1,
                DecimalUtils.castFrom(1, 10, 2));
        compareCastResult(
                CastExecutors.resolve(new BigIntType(false), new DecimalType(10, 2)),
                1L,
                DecimalUtils.castFrom(1, 10, 2));
        compareCastResult(
                CastExecutors.resolve(new FloatType(false), new DecimalType(10, 2)),
                1.23456F,
                DecimalUtils.castFrom(1.23456D, 10, 2));
        compareCastResult(
                CastExecutors.resolve(new DoubleType(false), new DecimalType(10, 2)),
                1.23456D,
                DecimalUtils.castFrom(1.23456D, 10, 2));
    }
@Test
public void testDecimalToDecimal() {
compareCastResult(
CastExecutors.resolve(new DecimalType(10, 4), new DecimalType(10, 2)),
DecimalUtils.castFrom(1.23456D, 10, 4),
DecimalUtils.castFrom(1.23456D, 10, 2));
compareCastResult(
CastExecutors.resolve(new DecimalType(10, 2), new DecimalType(10, 4)),
DecimalUtils.castFrom(1.23456D, 10, 2),
DecimalUtils.castFrom(1.2300D, 10, 4));
}
    // DECIMAL back to float/double; expected values reflect the source decimal's scale
    // (1.23456 stored at scale 4 reads back as 1.2346F, at scale 2 as 1.23D).
    @Test
    public void testDecimalToNumeric() {
        compareCastResult(
                CastExecutors.resolve(new DecimalType(10, 4), new FloatType(false)),
                DecimalUtils.castFrom(1.23456D, 10, 4),
                1.2346F);
        compareCastResult(
                CastExecutors.resolve(new DecimalType(10, 2), new DoubleType(false)),
                DecimalUtils.castFrom(1.23456D, 10, 2),
                1.23D);
    }
    // BOOLEAN to every numeric type (and DECIMAL(1, 0)); true is expected to map to 1.
    @Test
    public void testBooleanToNumeric() {
        compareCastResult(
                CastExecutors.resolve(new BooleanType(false), new TinyIntType(false)),
                true,
                (byte) 1);
        compareCastResult(
                CastExecutors.resolve(new BooleanType(false), new SmallIntType(false)),
                true,
                (short) 1);
        compareCastResult(
                CastExecutors.resolve(new BooleanType(false), new IntType(false)), true, 1);
        compareCastResult(
                CastExecutors.resolve(new BooleanType(false), new BigIntType(false)), true, 1L);
        compareCastResult(
                CastExecutors.resolve(new BooleanType(false), new FloatType(false)), true, 1F);
        compareCastResult(
                CastExecutors.resolve(new BooleanType(false), new DoubleType(false)), true, 1D);
        compareCastResult(
                CastExecutors.resolve(new BooleanType(false), new DecimalType(1, 0)),
                true,
                DecimalUtils.castFrom(1, 1, 0));
    }
    // Integer types to BOOLEAN: zero maps to false, any non-zero value to true.
    @Test
    public void testNumericToBoolean() {
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new BooleanType(false)),
                (byte) 1,
                true);
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new BooleanType(false)),
                (short) 1,
                true);
        compareCastResult(
                CastExecutors.resolve(new IntType(false), new BooleanType(false)), 0, false);
        compareCastResult(
                CastExecutors.resolve(new BigIntType(false), new BooleanType(false)), 12L, true);
    }
// To string rules
    // Every numeric type (including DECIMAL) cast to a string type; results are compared as
    // BinaryString against the plain decimal rendering of the value.
    @Test
    public void testNumericToString() {
        // byte to string
        compareCastResult(
                CastExecutors.resolve(new TinyIntType(false), new VarCharType(5)),
                (byte) 1,
                BinaryString.fromString("1"));
        // short to string
        compareCastResult(
                CastExecutors.resolve(new SmallIntType(false), new VarCharType(5)),
                (short) 1,
                BinaryString.fromString("1"));
        // int to string
        compareCastResult(
                CastExecutors.resolve(new IntType(false), new VarCharType(5)),
                1,
                BinaryString.fromString("1"));
        // bigint to string
        compareCastResult(
                CastExecutors.resolve(new BigIntType(false), new VarCharType(5)),
                1L,
                BinaryString.fromString("1"));
        // float to string
        compareCastResult(
                CastExecutors.resolve(new FloatType(false), new VarCharType(10)),
                1.23456F,
                BinaryString.fromString("1.23456"));
        // double to string
        compareCastResult(
                CastExecutors.resolve(new DoubleType(false), new VarCharType(10)),
                1.23456D,
                BinaryString.fromString("1.23456"));
        // decimal to string
        compareCastResult(
                CastExecutors.resolve(new DecimalType(10, 5), new VarCharType(20)),
                DecimalUtils.castFrom(1.23456D, 10, 5),
                BinaryString.fromString("1.23456"));
    }
@Test
public void testBooleanToString() {
compareCastResult(
CastExecutors.resolve(new BooleanType(false), new CharType(5)),
true,
BinaryString.fromString("true "));
compareCastResult(
CastExecutors.resolve(new BooleanType(false), new VarCharType(5)),
true,
BinaryString.fromString("true"));
}
    // Timestamp formatting: TIMESTAMP renders in UTC, TIMESTAMP WITH LOCAL TIME ZONE renders
    // in the JVM default zone; both expectations are built with DateTimeUtils so the test is
    // independent of where it runs.
    @Test
    public void testTimestampToString() {
        long mills = System.currentTimeMillis();
        Timestamp timestamp = Timestamp.fromEpochMillis(mills);
        compareCastResult(
                CastExecutors.resolve(new TimestampType(5), VarCharType.STRING_TYPE),
                timestamp,
                BinaryString.fromString(
                        DateTimeUtils.formatTimestamp(timestamp, DateTimeUtils.UTC_ZONE, 5)));
        compareCastResult(
                CastExecutors.resolve(new LocalZonedTimestampType(5), VarCharType.STRING_TYPE),
                timestamp,
                BinaryString.fromString(
                        DateTimeUtils.formatTimestamp(timestamp, TimeZone.getDefault(), 5)));
    }
@Test
public void testTimestampToNumeric() {
long mills = System.currentTimeMillis() / 1000 * 1000;
Timestamp timestamp1 = Timestamp.fromEpochMillis(mills);
long millisecond = timestamp1.getMillisecond();
Timestamp timestamp2 =
Timestamp.fromLocalDateTime(
DateTimeUtils.toLocalDateTime(mills, TimeZone.getDefault().toZoneId()));
long millisecond1 = timestamp2.getMillisecond();
// cast from TimestampType to BigIntType or IntType
compareCastResult(
CastExecutors.resolve(new TimestampType(3), new BigIntType(false)),
timestamp1,
DateTimeUtils.unixTimestamp(millisecond));
compareCastResult(
CastExecutors.resolve(new LocalZonedTimestampType(3), new BigIntType(false)),
timestamp2,
DateTimeUtils.unixTimestamp(millisecond1));
compareCastResult(
CastExecutors.resolve(new TimestampType(3), new IntType(false)),
timestamp1,
(int) DateTimeUtils.unixTimestamp(millisecond));
compareCastResult(
CastExecutors.resolve(new LocalZonedTimestampType(3), new IntType(false)),
timestamp2,
(int) DateTimeUtils.unixTimestamp(millisecond1));
// cast from BigIntType or IntType to TimestampType
compareCastResult(
CastExecutors.resolve(new BigIntType(false), new TimestampType(3)),
DateTimeUtils.unixTimestamp(millisecond),
timestamp1);
compareCastResult(
CastExecutors.resolve(new BigIntType(false), new LocalZonedTimestampType(3)),
DateTimeUtils.unixTimestamp(millisecond),
timestamp2);
compareCastResult(
CastExecutors.resolve(new IntType(false), new TimestampType(3)),
(int) DateTimeUtils.unixTimestamp(millisecond),
timestamp1);
compareCastResult(
CastExecutors.resolve(new IntType(false), new LocalZonedTimestampType(3)),
(int) DateTimeUtils.unixTimestamp(millisecond),
timestamp2);
}
@Test
public void testTimeToString() {
compareCastResult(
CastExecutors.resolve(new TimeType(2), VarCharType.STRING_TYPE),
36115615,
BinaryString.fromString("10:01:55.61"));
}
@Test
public void testDateToString() {
compareCastResult(
CastExecutors.resolve(new DateType(), VarCharType.STRING_TYPE),
19516,
BinaryString.fromString("2023-06-08"));
}
@Test
public void testStringToString() {
// varchar(10) to varchar(5)
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new VarCharType(5)),
BinaryString.fromString("1234567890"),
BinaryString.fromString("12345"));
// varchar(10) to varchar(20)
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new VarCharType(20)),
BinaryString.fromString("1234567890"),
BinaryString.fromString("1234567890"));
// varchar(10) to char(5)
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new CharType(5)),
BinaryString.fromString("1234567890"),
BinaryString.fromString("12345"));
// varchar(10) to char(20)
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new CharType(20)),
BinaryString.fromString("1234567890"),
BinaryString.fromString("1234567890 "));
// char(10) to varchar(5)
compareCastResult(
CastExecutors.resolve(new CharType(10), new VarCharType(5)),
BinaryString.fromString("1234567890"),
BinaryString.fromString("12345"));
// char(10) to varchar(20)
compareCastResult(
CastExecutors.resolve(new CharType(10), new VarCharType(20)),
BinaryString.fromString("12345678 "),
BinaryString.fromString("12345678 "));
// char(10) to char(5)
compareCastResult(
CastExecutors.resolve(new CharType(10), new CharType(5)),
BinaryString.fromString("12345678 "),
BinaryString.fromString("12345"));
// char(10) to char(20)
compareCastResult(
CastExecutors.resolve(new CharType(10), new CharType(20)),
BinaryString.fromString("12345678 "),
BinaryString.fromString("12345678 "));
}
// From string rules
@Test
public void testStringToBoolean() {
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("t"),
true);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("true"),
true);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("y"),
true);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("yes"),
true);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("1"),
true);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("TRUE"),
true);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("f"),
false);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("false"),
false);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("n"),
false);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("no"),
false);
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("0"),
false);
assertThatThrownBy(
() ->
compareCastResult(
CastExecutors.resolve(
new VarCharType(5), new BooleanType(false)),
BinaryString.fromString("11"),
false))
.hasMessage("Cannot parse '11' as BOOLEAN.");
}
@Test
public void testStringToDecimal() {
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new DecimalType(5, 2)),
BinaryString.fromString("1.233"),
DecimalUtils.castFrom(1.233D, 5, 2));
}
@Test
public void testStringToNumeric() {
// string to byte
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new TinyIntType(false)),
BinaryString.fromString("1"),
(byte) 1);
// string to short
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new SmallIntType(false)),
BinaryString.fromString("1"),
(short) 1);
// string to int
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new IntType(false)),
BinaryString.fromString("1"),
1);
// string to bigint
compareCastResult(
CastExecutors.resolve(new VarCharType(5), new BigIntType(false)),
BinaryString.fromString("1"),
1L);
// string to float
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new FloatType(false)),
BinaryString.fromString("1.23456"),
1.23456F);
// string to double
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new DoubleType(false)),
BinaryString.fromString("1.23456"),
1.23456D);
}
@Test
public void testStringToDate() {
String date = "2023-06-06";
compareCastResult(
CastExecutors.resolve(new VarCharType(25), new DateType()),
BinaryString.fromString(date),
DateTimeUtils.parseDate(date));
}
@Test
public void testStringToTime() {
String date = "09:30:00.0";
compareCastResult(
CastExecutors.resolve(new VarCharType(25), new TimeType(2)),
BinaryString.fromString(date),
DateTimeUtils.parseTime(date));
}
@Test
public void testStringToTimestamp() {
String date = "2017-12-12 09:30:00.0";
compareCastResult(
CastExecutors.resolve(new VarCharType(25), new TimestampType(3)),
BinaryString.fromString(date),
DateTimeUtils.parseTimestampData(date, 3));
compareCastResult(
CastExecutors.resolve(new VarCharType(25), new LocalZonedTimestampType(3)),
BinaryString.fromString(date),
DateTimeUtils.parseTimestampData(date, 3, TimeZone.getDefault()));
}
@Test
public void testStringToBinary() {
// string(10) to binary(5)
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new VarBinaryType(5)),
BinaryString.fromString("12345678"),
"12345".getBytes());
// string(10) to binary(20)
compareCastResult(
CastExecutors.resolve(new VarCharType(10), new VarBinaryType(20)),
BinaryString.fromString("12345678"),
"12345678".getBytes());
}
@Test
public void testStringToArray() {
CastExecutor<BinaryString, InternalArray> stringToIntArray =
(CastExecutor<BinaryString, InternalArray>)
CastExecutors.resolve(
VarCharType.STRING_TYPE, DataTypes.ARRAY(DataTypes.INT()));
InternalArray result = stringToIntArray.cast(BinaryString.fromString("[1, 2, 3]"));
assertThat(result.size()).isEqualTo(3);
assertThat(result.getInt(0)).isEqualTo(1);
assertThat(result.getInt(1)).isEqualTo(2);
assertThat(result.getInt(2)).isEqualTo(3);
// Test empty array
result = stringToIntArray.cast(BinaryString.fromString("[]"));
assertThat(result.size()).isEqualTo(0);
// Test string to string array
CastExecutor<BinaryString, InternalArray> stringToStringArray =
(CastExecutor<BinaryString, InternalArray>)
CastExecutors.resolve(
VarCharType.STRING_TYPE, DataTypes.ARRAY(DataTypes.STRING()));
result = stringToStringArray.cast(BinaryString.fromString("[hello, world, test]"));
assertThat(result.size()).isEqualTo(3);
assertThat(result.getString(0).toString()).isEqualTo("hello");
assertThat(result.getString(1).toString()).isEqualTo("world");
assertThat(result.getString(2).toString()).isEqualTo("test");
// Test array with null values
result = stringToIntArray.cast(BinaryString.fromString("[1, null, 3]"));
assertThat(result.size()).isEqualTo(3);
assertThat(result.getInt(0)).isEqualTo(1);
assertThat(result.isNullAt(1)).isTrue();
assertThat(result.getInt(2)).isEqualTo(3);
}
@Test
public void testStringToMap() {
// Test string to map<string, int>
CastExecutor<BinaryString, InternalMap> stringToMap =
(CastExecutor<BinaryString, InternalMap>)
CastExecutors.resolve(
VarCharType.STRING_TYPE,
DataTypes.MAP(DataTypes.STRING(), DataTypes.INT()));
InternalMap result = stringToMap.cast(BinaryString.fromString("{key1 -> 1, key2 -> 2}"));
assertThat(result.size()).isEqualTo(2);
InternalArray keyArray = result.keyArray();
InternalArray valueArray = result.valueArray();
assertThat(keyArray.getString(0).toString()).isEqualTo("key2");
assertThat(valueArray.getInt(0)).isEqualTo(2);
assertThat(keyArray.getString(1).toString()).isEqualTo("key1");
assertThat(valueArray.getInt(1)).isEqualTo(1);
// Test empty map
result = stringToMap.cast(BinaryString.fromString("{}"));
assertThat(result.size()).isEqualTo(0);
// Test map with null values
result = stringToMap.cast(BinaryString.fromString("{key1 -> null, key2 -> 42}"));
assertThat(result.size()).isEqualTo(2);
keyArray = result.keyArray();
valueArray = result.valueArray();
assertThat(keyArray.getString(0).toString()).isEqualTo("key2");
assertThat(valueArray.getInt(0)).isEqualTo(42);
assertThat(keyArray.getString(1).toString()).isEqualTo("key1");
assertThat(valueArray.isNullAt(1)).isTrue();
}
@Test
public void testStringToRow() {
// Test string to row
RowType rowType = RowType.of(DataTypes.INT(), DataTypes.STRING(), DataTypes.BOOLEAN());
CastExecutor<BinaryString, InternalRow> stringToRow =
(CastExecutor<BinaryString, InternalRow>)
CastExecutors.resolve(VarCharType.STRING_TYPE, rowType);
InternalRow result = stringToRow.cast(BinaryString.fromString("{42, hello, true}"));
assertThat(result.getFieldCount()).isEqualTo(3);
assertThat(result.getInt(0)).isEqualTo(42);
assertThat(result.getString(1).toString()).isEqualTo("hello");
assertThat(result.getBoolean(2)).isTrue();
// Test empty row
result = stringToRow.cast(BinaryString.fromString("{}"));
assertThat(result.getFieldCount()).isEqualTo(3);
assertThat(result.isNullAt(0)).isTrue();
assertThat(result.isNullAt(1)).isTrue();
assertThat(result.isNullAt(2)).isTrue();
// Test row with null values
result = stringToRow.cast(BinaryString.fromString("{null, test, false}"));
assertThat(result.getFieldCount()).isEqualTo(3);
assertThat(result.isNullAt(0)).isTrue();
assertThat(result.getString(1).toString()).isEqualTo("test");
assertThat(result.getBoolean(2)).isFalse();
}
@Test
public void testStringToComplexTypesErrorHandling() {
// Test invalid array format
CastExecutor<BinaryString, InternalArray> stringToIntArray =
(CastExecutor<BinaryString, InternalArray>)
CastExecutors.resolve(
VarCharType.STRING_TYPE, DataTypes.ARRAY(DataTypes.INT()));
try {
stringToIntArray.cast(BinaryString.fromString("[1, 2, 3")); // missing closing bracket
fail("Expected RuntimeException for invalid array format");
} catch (RuntimeException e) {
assertThat(e.getMessage()).contains("Cannot parse");
assertThat(e.getMessage()).contains("as ARRAY");
}
// Test invalid map format
CastExecutor<BinaryString, InternalMap> stringToMap =
(CastExecutor<BinaryString, InternalMap>)
CastExecutors.resolve(
VarCharType.STRING_TYPE,
DataTypes.MAP(DataTypes.STRING(), DataTypes.INT()));
try {
stringToMap.cast(BinaryString.fromString("{key1 -> 1, key2")); // incomplete entry
fail("Expected RuntimeException for invalid map format");
} catch (RuntimeException e) {
assertThat(e.getMessage()).contains("Cannot parse");
assertThat(e.getMessage()).contains("as MAP");
}
// Test invalid row format
RowType rowType = RowType.of(DataTypes.INT(), DataTypes.STRING());
CastExecutor<BinaryString, InternalRow> stringToRow =
(CastExecutor<BinaryString, InternalRow>)
CastExecutors.resolve(VarCharType.STRING_TYPE, rowType);
try {
stringToRow.cast(BinaryString.fromString("{42, hello, extra}")); // too many fields
fail("Expected RuntimeException for field count mismatch");
} catch (RuntimeException e) {
assertThat(e.getMessage()).contains("field count mismatch");
}
}
@Test
public void testBinaryToString() {
// binary(5) to string(10)
compareCastResult(
CastExecutors.resolve(new VarBinaryType(5), new VarCharType(10)),
"12345".getBytes(),
BinaryString.fromString("12345"));
// binary(20) to string(10)
compareCastResult(
CastExecutors.resolve(new VarBinaryType(20), new VarCharType(10)),
"12345678".getBytes(),
BinaryString.fromString("12345678"));
}
// To binary rules
@Test
public void testBinaryToBinary() {
// binary(10) to binary(5)
compareCastResult(
CastExecutors.resolve(new BinaryType(10), new BinaryType(5)),
"1234567890".getBytes(),
"12345".getBytes());
// binary(10) to binary(20)
compareCastResult(
CastExecutors.resolve(new BinaryType(10), new BinaryType(20)),
"12345678".getBytes(),
new byte[] {49, 50, 51, 52, 53, 54, 55, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0});
// binary(10) to varbinary(5)
compareCastResult(
CastExecutors.resolve(new BinaryType(10), new VarBinaryType(5)),
"1234567890".getBytes(),
"12345".getBytes());
// binary(10) to varbinary(20)
compareCastResult(
CastExecutors.resolve(new BinaryType(10), new VarBinaryType(20)),
"12345678".getBytes(),
"12345678".getBytes());
}
// Date/Time/Timestamp rules
@Test
public void testTimestampData() {
long mills = System.currentTimeMillis();
Timestamp timestamp = Timestamp.fromEpochMillis(mills);
// timestamp(5) to timestamp(2)
compareCastResult(
CastExecutors.resolve(new TimestampType(5), new TimestampType(2)),
timestamp,
DateTimeUtils.truncate(Timestamp.fromEpochMillis(mills), 2));
// timestamp to date
compareCastResult(
CastExecutors.resolve(new TimestampType(5), new DateType()),
Timestamp.fromEpochMillis(mills),
(int) (mills / DateTimeUtils.MILLIS_PER_DAY));
// timestamp to time
compareCastResult(
CastExecutors.resolve(new TimestampType(5), new TimeType(2)),
Timestamp.fromEpochMillis(mills),
(int) (mills % DateTimeUtils.MILLIS_PER_DAY));
// timestamp(3) to timestamp_ltz(3)
compareCastResult(
CastExecutors.resolve(new TimestampType(3), new LocalZonedTimestampType(3)),
timestamp,
DateTimeUtils.timestampToTimestampWithLocalZone(
Timestamp.fromEpochMillis(mills), TimeZone.getDefault()));
// timestamp_ltz(5) to timestamp(2)
compareCastResult(
CastExecutors.resolve(new LocalZonedTimestampType(5), new TimestampType(2)),
timestamp,
DateTimeUtils.truncate(
DateTimeUtils.timestampWithLocalZoneToTimestamp(
Timestamp.fromEpochMillis(mills), TimeZone.getDefault()),
2));
// timestamp_ltz to date
compareCastResult(
CastExecutors.resolve(new LocalZonedTimestampType(5), new DateType()),
Timestamp.fromEpochMillis(mills),
DateTimeUtils.timestampWithLocalZoneToDate(timestamp, TimeZone.getDefault()));
// timestamp_ltz to time
compareCastResult(
CastExecutors.resolve(new LocalZonedTimestampType(5), new TimeType(2)),
Timestamp.fromEpochMillis(mills),
DateTimeUtils.timestampWithLocalZoneToTime(timestamp, TimeZone.getDefault()));
}
@Test
public void testDateToTimestamp() {
String date = "2023-06-06";
compareCastResult(
CastExecutors.resolve(new DateType(), new TimestampType(5)),
DateTimeUtils.parseDate(date),
DateTimeUtils.parseTimestampData(date, 3));
compareCastResult(
CastExecutors.resolve(new DateType(), new LocalZonedTimestampType(5)),
DateTimeUtils.parseDate(date),
DateTimeUtils.parseTimestampData(date, 3, TimeZone.getDefault()));
}
@Test
public void testTimeToTimestamp() {
String time = "12:00:00.123";
compareCastResult(
CastExecutors.resolve(new TimeType(), new TimestampType(3)),
DateTimeUtils.parseTime(time),
DateTimeUtils.parseTimestampData("1970-01-01 " + time, 3));
}
@Test
public void testArrayToString() {
ArrayType arrayType = new ArrayType(DataTypes.INT());
GenericArray genericArray = new GenericArray(new Integer[] {1, null, 2});
compareCastResult(
CastExecutors.resolve(arrayType, DataTypes.STRING()),
genericArray,
BinaryString.fromString("[1, null, 2]"));
}
@Test
public void testMapToString() {
MapType mapType = new MapType(DataTypes.INT(), DataTypes.STRING());
Map<Object, Object> javaMap = new HashMap<>();
javaMap.put(1, BinaryString.fromString("i"));
javaMap.put(2, BinaryString.fromString("miss"));
javaMap.put(3, BinaryString.fromString("you"));
javaMap.put(4, null);
GenericMap genericMap = new GenericMap(javaMap);
compareCastResult(
CastExecutors.resolve(mapType, DataTypes.STRING()),
genericMap,
BinaryString.fromString("{1 -> i, 2 -> miss, 3 -> you, 4 -> null}"));
}
@Test
public void testRowToString() {
RowType rowType =
DataTypes.ROW(
DataTypes.FIELD(0, "f0", DataTypes.INT()),
DataTypes.FIELD(
1,
"f1",
DataTypes.ROW(
DataTypes.FIELD(2, "f0", DataTypes.DATE()),
DataTypes.FIELD(
3,
"f1",
new MapType(
DataTypes.INT(),
new ArrayType(DataTypes.INT()))),
DataTypes.FIELD(4, "f2", DataTypes.INT()))));
HashMap<Integer, GenericArray> javaMap = new HashMap<>();
javaMap.put(1, new GenericArray(new Integer[] {1, null, 2}));
GenericRow row =
GenericRow.of(
1,
GenericRow.of(
DateTimeUtils.parseDate("2025-01-06"),
new GenericMap(javaMap),
null));
compareCastResult(
CastExecutors.resolve(rowType, DataTypes.STRING()),
row,
BinaryString.fromString("{1, {2025-01-06, {1 -> [1, null, 2]}, null}}"));
}
@SuppressWarnings("rawtypes")
private void compareCastResult(CastExecutor<?, ?> cast, Object input, Object output) {
assertThat(((CastExecutor) cast).cast(input)).isEqualTo(output);
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/dialogflow/cx/v3/page.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class PagesGrpc {
  // Static-only holder for descriptors and stub factories; never instantiated.
  private PagesGrpc() {}
  public static final java.lang.String SERVICE_NAME = "google.cloud.dialogflow.cx.v3.Pages";
  // Static method descriptors that strictly reflect the proto.
  // Lazily created descriptor for the unary ListPages RPC; read and written only
  // through getListPagesMethod(), which publishes it safely via this volatile field.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.ListPagesRequest,
          com.google.cloud.dialogflow.cx.v3.ListPagesResponse>
      getListPagesMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListPages",
      requestType = com.google.cloud.dialogflow.cx.v3.ListPagesRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3.ListPagesResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.ListPagesRequest,
          com.google.cloud.dialogflow.cx.v3.ListPagesResponse>
      getListPagesMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3.ListPagesRequest,
            com.google.cloud.dialogflow.cx.v3.ListPagesResponse>
        getListPagesMethod;
    // Double-checked locking: one volatile read, then re-check under the class lock.
    if ((getListPagesMethod = PagesGrpc.getListPagesMethod) == null) {
      synchronized (PagesGrpc.class) {
        if ((getListPagesMethod = PagesGrpc.getListPagesMethod) == null) {
          PagesGrpc.getListPagesMethod =
              getListPagesMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3.ListPagesRequest,
                          com.google.cloud.dialogflow.cx.v3.ListPagesResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListPages"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.ListPagesRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.ListPagesResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(new PagesMethodDescriptorSupplier("ListPages"))
                      .build();
        }
      }
    }
    return getListPagesMethod;
  }
  // Lazily created descriptor for the unary GetPage RPC; access via getGetPageMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.GetPageRequest, com.google.cloud.dialogflow.cx.v3.Page>
      getGetPageMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetPage",
      requestType = com.google.cloud.dialogflow.cx.v3.GetPageRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3.Page.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.GetPageRequest, com.google.cloud.dialogflow.cx.v3.Page>
      getGetPageMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3.GetPageRequest,
            com.google.cloud.dialogflow.cx.v3.Page>
        getGetPageMethod;
    // Double-checked locking: one volatile read, then re-check under the class lock.
    if ((getGetPageMethod = PagesGrpc.getGetPageMethod) == null) {
      synchronized (PagesGrpc.class) {
        if ((getGetPageMethod = PagesGrpc.getGetPageMethod) == null) {
          PagesGrpc.getGetPageMethod =
              getGetPageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3.GetPageRequest,
                          com.google.cloud.dialogflow.cx.v3.Page>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetPage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.GetPageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.Page.getDefaultInstance()))
                      .setSchemaDescriptor(new PagesMethodDescriptorSupplier("GetPage"))
                      .build();
        }
      }
    }
    return getGetPageMethod;
  }
  // Lazily created descriptor for the unary CreatePage RPC; access via getCreatePageMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.CreatePageRequest,
          com.google.cloud.dialogflow.cx.v3.Page>
      getCreatePageMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreatePage",
      requestType = com.google.cloud.dialogflow.cx.v3.CreatePageRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3.Page.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.CreatePageRequest,
          com.google.cloud.dialogflow.cx.v3.Page>
      getCreatePageMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3.CreatePageRequest,
            com.google.cloud.dialogflow.cx.v3.Page>
        getCreatePageMethod;
    // Double-checked locking: one volatile read, then re-check under the class lock.
    if ((getCreatePageMethod = PagesGrpc.getCreatePageMethod) == null) {
      synchronized (PagesGrpc.class) {
        if ((getCreatePageMethod = PagesGrpc.getCreatePageMethod) == null) {
          PagesGrpc.getCreatePageMethod =
              getCreatePageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3.CreatePageRequest,
                          com.google.cloud.dialogflow.cx.v3.Page>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreatePage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.CreatePageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.Page.getDefaultInstance()))
                      .setSchemaDescriptor(new PagesMethodDescriptorSupplier("CreatePage"))
                      .build();
        }
      }
    }
    return getCreatePageMethod;
  }
  // Lazily created descriptor for the unary UpdatePage RPC; access via getUpdatePageMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.UpdatePageRequest,
          com.google.cloud.dialogflow.cx.v3.Page>
      getUpdatePageMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdatePage",
      requestType = com.google.cloud.dialogflow.cx.v3.UpdatePageRequest.class,
      responseType = com.google.cloud.dialogflow.cx.v3.Page.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.UpdatePageRequest,
          com.google.cloud.dialogflow.cx.v3.Page>
      getUpdatePageMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3.UpdatePageRequest,
            com.google.cloud.dialogflow.cx.v3.Page>
        getUpdatePageMethod;
    // Double-checked locking: one volatile read, then re-check under the class lock.
    if ((getUpdatePageMethod = PagesGrpc.getUpdatePageMethod) == null) {
      synchronized (PagesGrpc.class) {
        if ((getUpdatePageMethod = PagesGrpc.getUpdatePageMethod) == null) {
          PagesGrpc.getUpdatePageMethod =
              getUpdatePageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3.UpdatePageRequest,
                          com.google.cloud.dialogflow.cx.v3.Page>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdatePage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.UpdatePageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.Page.getDefaultInstance()))
                      .setSchemaDescriptor(new PagesMethodDescriptorSupplier("UpdatePage"))
                      .build();
        }
      }
    }
    return getUpdatePageMethod;
  }
  // Lazily created descriptor for the unary DeletePage RPC; access via getDeletePageMethod().
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.DeletePageRequest, com.google.protobuf.Empty>
      getDeletePageMethod;
  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeletePage",
      requestType = com.google.cloud.dialogflow.cx.v3.DeletePageRequest.class,
      responseType = com.google.protobuf.Empty.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.dialogflow.cx.v3.DeletePageRequest, com.google.protobuf.Empty>
      getDeletePageMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.dialogflow.cx.v3.DeletePageRequest, com.google.protobuf.Empty>
        getDeletePageMethod;
    // Double-checked locking: one volatile read, then re-check under the class lock.
    if ((getDeletePageMethod = PagesGrpc.getDeletePageMethod) == null) {
      synchronized (PagesGrpc.class) {
        if ((getDeletePageMethod = PagesGrpc.getDeletePageMethod) == null) {
          PagesGrpc.getDeletePageMethod =
              getDeletePageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.dialogflow.cx.v3.DeletePageRequest,
                          com.google.protobuf.Empty>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeletePage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.dialogflow.cx.v3.DeletePageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.protobuf.Empty.getDefaultInstance()))
                      .setSchemaDescriptor(new PagesMethodDescriptorSupplier("DeletePage"))
                      .build();
        }
      }
    }
    return getDeletePageMethod;
  }
/** Creates a new async stub that supports all call types for the service */
public static PagesStub newStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<PagesStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<PagesStub>() {
@java.lang.Override
public PagesStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new PagesStub(channel, callOptions);
}
};
return PagesStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static PagesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<PagesBlockingV2Stub> factory =
new io.grpc.stub.AbstractStub.StubFactory<PagesBlockingV2Stub>() {
@java.lang.Override
public PagesBlockingV2Stub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new PagesBlockingV2Stub(channel, callOptions);
}
};
return PagesBlockingV2Stub.newStub(factory, channel);
}
/**
* Creates a new blocking-style stub that supports unary and streaming output calls on the service
*/
public static PagesBlockingStub newBlockingStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<PagesBlockingStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<PagesBlockingStub>() {
@java.lang.Override
public PagesBlockingStub newStub(
io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new PagesBlockingStub(channel, callOptions);
}
};
return PagesBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static PagesFutureStub newFutureStub(io.grpc.Channel channel) {
io.grpc.stub.AbstractStub.StubFactory<PagesFutureStub> factory =
new io.grpc.stub.AbstractStub.StubFactory<PagesFutureStub>() {
@java.lang.Override
public PagesFutureStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
return new PagesFutureStub(channel, callOptions);
}
};
return PagesFutureStub.newStub(factory, channel);
}
  /**
   *
   *
   * <pre>
   * Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].
   * </pre>
   */
  public interface AsyncService {
    // Every default implementation fails the call with UNIMPLEMENTED; servers
    // override the methods they actually support.
    /**
     *
     *
     * <pre>
     * Returns the list of all pages in the specified flow.
     * </pre>
     */
    default void listPages(
        com.google.cloud.dialogflow.cx.v3.ListPagesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.ListPagesResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getListPagesMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified page.
     * </pre>
     */
    default void getPage(
        com.google.cloud.dialogflow.cx.v3.GetPageRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetPageMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Creates a page in the specified flow.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    default void createPage(
        com.google.cloud.dialogflow.cx.v3.CreatePageRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getCreatePageMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    default void updatePage(
        com.google.cloud.dialogflow.cx.v3.UpdatePageRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getUpdatePageMethod(), responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    default void deletePage(
        com.google.cloud.dialogflow.cx.v3.DeletePageRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getDeletePageMethod(), responseObserver);
    }
  }
  /**
   * Base class for the server implementation of the service Pages.
   *
   * <pre>
   * Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].
   * </pre>
   */
  public abstract static class PagesImplBase implements io.grpc.BindableService, AsyncService {
    // Wires the AsyncService handlers (default or overridden) into a service definition.
    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return PagesGrpc.bindService(this);
    }
  }
  /**
   * A stub to allow clients to do asynchronous rpc calls to service Pages.
   *
   * <p>Generated code: each method issues a non-blocking unary call on the stub's channel and
   * reports the outcome through the supplied {@code StreamObserver}.
   *
   * <pre>
   * Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].
   * </pre>
   */
  public static final class PagesStub extends io.grpc.stub.AbstractAsyncStub<PagesStub> {
    private PagesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected PagesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub whenever call options change (e.g. withDeadline).
      return new PagesStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all pages in the specified flow.
     * </pre>
     */
    public void listPages(
        com.google.cloud.dialogflow.cx.v3.ListPagesRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.ListPagesResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListPagesMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified page.
     * </pre>
     */
    public void getPage(
        com.google.cloud.dialogflow.cx.v3.GetPageRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetPageMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Creates a page in the specified flow.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public void createPage(
        com.google.cloud.dialogflow.cx.v3.CreatePageRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreatePageMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public void updatePage(
        com.google.cloud.dialogflow.cx.v3.UpdatePageRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdatePageMethod(), getCallOptions()), request, responseObserver);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public void deletePage(
        com.google.cloud.dialogflow.cx.v3.DeletePageRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeletePageMethod(), getCallOptions()), request, responseObserver);
    }
  }
  /**
   * A stub to allow clients to do synchronous rpc calls to service Pages.
   *
   * <p>Generated code: each method blocks the calling thread until the unary call completes.
   *
   * <pre>
   * Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].
   * </pre>
   */
  public static final class PagesBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<PagesBlockingV2Stub> {
    private PagesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected PagesBlockingV2Stub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub whenever call options change (e.g. withDeadline).
      return new PagesBlockingV2Stub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all pages in the specified flow.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.ListPagesResponse listPages(
        com.google.cloud.dialogflow.cx.v3.ListPagesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListPagesMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified page.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.Page getPage(
        com.google.cloud.dialogflow.cx.v3.GetPageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetPageMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a page in the specified flow.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.Page createPage(
        com.google.cloud.dialogflow.cx.v3.CreatePageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreatePageMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.Page updatePage(
        com.google.cloud.dialogflow.cx.v3.UpdatePageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdatePageMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.protobuf.Empty deletePage(
        com.google.cloud.dialogflow.cx.v3.DeletePageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeletePageMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do limited synchronous rpc calls to service Pages.
   *
   * <p>Generated code: legacy blocking stub; behaviour for these unary methods matches
   * {@code PagesBlockingV2Stub}.
   *
   * <pre>
   * Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].
   * </pre>
   */
  public static final class PagesBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<PagesBlockingStub> {
    private PagesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected PagesBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub whenever call options change (e.g. withDeadline).
      return new PagesBlockingStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all pages in the specified flow.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.ListPagesResponse listPages(
        com.google.cloud.dialogflow.cx.v3.ListPagesRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListPagesMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified page.
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.Page getPage(
        com.google.cloud.dialogflow.cx.v3.GetPageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetPageMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a page in the specified flow.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.Page createPage(
        com.google.cloud.dialogflow.cx.v3.CreatePageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreatePageMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.cloud.dialogflow.cx.v3.Page updatePage(
        com.google.cloud.dialogflow.cx.v3.UpdatePageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdatePageMethod(), getCallOptions(), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.protobuf.Empty deletePage(
        com.google.cloud.dialogflow.cx.v3.DeletePageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeletePageMethod(), getCallOptions(), request);
    }
  }
  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service Pages.
   *
   * <p>Generated code: each method returns immediately with a future for the unary call's result.
   *
   * <pre>
   * Service for managing [Pages][google.cloud.dialogflow.cx.v3.Page].
   * </pre>
   */
  public static final class PagesFutureStub
      extends io.grpc.stub.AbstractFutureStub<PagesFutureStub> {
    private PagesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }
    @java.lang.Override
    protected PagesFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      // Invoked by AbstractStub whenever call options change (e.g. withDeadline).
      return new PagesFutureStub(channel, callOptions);
    }
    /**
     *
     *
     * <pre>
     * Returns the list of all pages in the specified flow.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3.ListPagesResponse>
        listPages(com.google.cloud.dialogflow.cx.v3.ListPagesRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListPagesMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Retrieves the specified page.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3.Page>
        getPage(com.google.cloud.dialogflow.cx.v3.GetPageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetPageMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Creates a page in the specified flow.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3.Page>
        createPage(com.google.cloud.dialogflow.cx.v3.CreatePageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreatePageMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Updates the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.dialogflow.cx.v3.Page>
        updatePage(com.google.cloud.dialogflow.cx.v3.UpdatePageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdatePageMethod(), getCallOptions()), request);
    }
    /**
     *
     *
     * <pre>
     * Deletes the specified page.
     * Note: You should always train a flow prior to sending it queries. See the
     * [training
     * documentation](https://cloud.google.com/dialogflow/cx/docs/concept/training).
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty> deletePage(
        com.google.cloud.dialogflow.cx.v3.DeletePageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeletePageMethod(), getCallOptions()), request);
    }
  }
  // Dispatch ids used by MethodHandlers to route a call to the matching service method.
  private static final int METHODID_LIST_PAGES = 0;
  private static final int METHODID_GET_PAGE = 1;
  private static final int METHODID_CREATE_PAGE = 2;
  private static final int METHODID_UPDATE_PAGE = 3;
  private static final int METHODID_DELETE_PAGE = 4;
  /**
   * Routes an incoming call for one method id to the corresponding {@link AsyncService} handler.
   * All Pages RPCs are unary, so only the unary {@code invoke} overload is ever dispatched; the
   * streaming overload below is unreachable and asserts.
   */
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;
    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // Casts are safe: bindService pairs each method id with matching Req/Resp types.
      switch (methodId) {
        case METHODID_LIST_PAGES:
          serviceImpl.listPages(
              (com.google.cloud.dialogflow.cx.v3.ListPagesRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.ListPagesResponse>)
                  responseObserver);
          break;
        case METHODID_GET_PAGE:
          serviceImpl.getPage(
              (com.google.cloud.dialogflow.cx.v3.GetPageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page>)
                  responseObserver);
          break;
        case METHODID_CREATE_PAGE:
          serviceImpl.createPage(
              (com.google.cloud.dialogflow.cx.v3.CreatePageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page>)
                  responseObserver);
          break;
        case METHODID_UPDATE_PAGE:
          serviceImpl.updatePage(
              (com.google.cloud.dialogflow.cx.v3.UpdatePageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.dialogflow.cx.v3.Page>)
                  responseObserver);
          break;
        case METHODID_DELETE_PAGE:
          serviceImpl.deletePage(
              (com.google.cloud.dialogflow.cx.v3.DeletePageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // No client/bidi-streaming methods exist on this service.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
  /**
   * Builds the server-side service definition, binding each Pages method descriptor to a
   * unary-call handler that dispatches into the given {@code service} implementation.
   */
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getListPagesMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3.ListPagesRequest,
                    com.google.cloud.dialogflow.cx.v3.ListPagesResponse>(
                    service, METHODID_LIST_PAGES)))
        .addMethod(
            getGetPageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3.GetPageRequest,
                    com.google.cloud.dialogflow.cx.v3.Page>(service, METHODID_GET_PAGE)))
        .addMethod(
            getCreatePageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3.CreatePageRequest,
                    com.google.cloud.dialogflow.cx.v3.Page>(service, METHODID_CREATE_PAGE)))
        .addMethod(
            getUpdatePageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3.UpdatePageRequest,
                    com.google.cloud.dialogflow.cx.v3.Page>(service, METHODID_UPDATE_PAGE)))
        .addMethod(
            getDeletePageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.dialogflow.cx.v3.DeletePageRequest, com.google.protobuf.Empty>(
                    service, METHODID_DELETE_PAGE)))
        .build();
  }
  // Suppliers exposing protobuf descriptor metadata (file/service/method) for
  // server reflection and debugging tooling.
  private abstract static class PagesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    PagesBaseDescriptorSupplier() {}
    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.dialogflow.cx.v3.PageProto.getDescriptor();
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("Pages");
    }
  }
  private static final class PagesFileDescriptorSupplier extends PagesBaseDescriptorSupplier {
    PagesFileDescriptorSupplier() {}
  }
  private static final class PagesMethodDescriptorSupplier extends PagesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;
    PagesMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    // Lazy initialization via double-checked locking; the volatile field makes
    // the published descriptor safe to read without re-taking the lock.
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (PagesGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new PagesFileDescriptorSupplier())
                      .addMethod(getListPagesMethod())
                      .addMethod(getGetPageMethod())
                      .addMethod(getCreatePageMethod())
                      .addMethod(getUpdatePageMethod())
                      .addMethod(getDeletePageMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
package org.hibernate.query.validator;
import org.hibernate.CustomEntityDirtinessStrategy;
import org.hibernate.EntityNameResolver;
import org.hibernate.MappingException;
import org.hibernate.SessionFactoryObserver;
import org.hibernate.TimeZoneStorageStrategy;
import org.hibernate.boot.internal.DefaultCustomEntityDirtinessStrategy;
import org.hibernate.boot.internal.MetadataImpl;
import org.hibernate.boot.internal.StandardEntityNotFoundDelegate;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.naming.ImplicitNamingStrategy;
import org.hibernate.boot.model.naming.ImplicitNamingStrategyJpaCompliantImpl;
import org.hibernate.boot.model.naming.PhysicalNamingStrategy;
import org.hibernate.boot.model.naming.PhysicalNamingStrategyStandardImpl;
import org.hibernate.boot.model.relational.Database;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.boot.registry.BootstrapServiceRegistryBuilder;
import org.hibernate.boot.registry.classloading.internal.ClassLoaderServiceImpl;
import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
import org.hibernate.boot.registry.internal.StandardServiceRegistryImpl;
import org.hibernate.boot.spi.BootstrapContext;
import org.hibernate.boot.spi.MappingDefaults;
import org.hibernate.boot.spi.MetadataBuildingContext;
import org.hibernate.boot.spi.MetadataBuildingOptions;
import org.hibernate.boot.spi.MetadataImplementor;
import org.hibernate.boot.spi.SessionFactoryOptions;
import org.hibernate.cache.internal.DisabledCaching;
import org.hibernate.cache.spi.CacheImplementor;
import org.hibernate.cache.spi.access.AccessType;
import org.hibernate.context.spi.CurrentTenantIdentifierResolver;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.function.CommonFunctionFactory;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.engine.query.internal.NativeQueryInterpreterStandardImpl;
import org.hibernate.engine.query.spi.NativeQueryInterpreter;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.graph.spi.RootGraphImplementor;
import org.hibernate.id.factory.IdentifierGeneratorFactory;
import org.hibernate.id.factory.internal.StandardIdentifierGeneratorFactory;
import org.hibernate.internal.FastSessionServices;
import org.hibernate.jpa.internal.MutableJpaComplianceImpl;
import org.hibernate.jpa.spi.JpaCompliance;
import org.hibernate.jpa.spi.MutableJpaCompliance;
import org.hibernate.loader.BatchFetchStyle;
import org.hibernate.mapping.Property;
import org.hibernate.metamodel.AttributeClassification;
import org.hibernate.metamodel.CollectionClassification;
import org.hibernate.metamodel.internal.JpaMetaModelPopulationSetting;
import org.hibernate.metamodel.internal.JpaStaticMetaModelPopulationSetting;
import org.hibernate.metamodel.internal.MetadataContext;
import org.hibernate.metamodel.internal.RuntimeMetamodelsImpl;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.model.domain.DomainType;
import org.hibernate.metamodel.model.domain.EntityDomainType;
import org.hibernate.metamodel.model.domain.ManagedDomainType;
import org.hibernate.metamodel.model.domain.PersistentAttribute;
import org.hibernate.metamodel.model.domain.internal.AbstractAttribute;
import org.hibernate.metamodel.model.domain.internal.AbstractPluralAttribute;
import org.hibernate.metamodel.model.domain.internal.BagAttributeImpl;
import org.hibernate.metamodel.model.domain.internal.BasicTypeImpl;
import org.hibernate.metamodel.model.domain.internal.EmbeddableTypeImpl;
import org.hibernate.metamodel.model.domain.internal.EntityTypeImpl;
import org.hibernate.metamodel.model.domain.internal.JpaMetamodelImpl;
import org.hibernate.metamodel.model.domain.internal.ListAttributeImpl;
import org.hibernate.metamodel.model.domain.internal.MapAttributeImpl;
import org.hibernate.metamodel.model.domain.internal.MappedSuperclassTypeImpl;
import org.hibernate.metamodel.model.domain.internal.MappingMetamodelImpl;
import org.hibernate.metamodel.model.domain.internal.PluralAttributeBuilder;
import org.hibernate.metamodel.model.domain.internal.SetAttributeImpl;
import org.hibernate.metamodel.model.domain.internal.SingularAttributeImpl;
import org.hibernate.metamodel.model.domain.spi.JpaMetamodelImplementor;
import org.hibernate.metamodel.spi.MappingMetamodelImplementor;
import org.hibernate.metamodel.spi.MetamodelImplementor;
import org.hibernate.metamodel.spi.RuntimeMetamodelsImplementor;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.proxy.EntityNotFoundDelegate;
import org.hibernate.query.criteria.ValueHandlingMode;
import org.hibernate.query.hql.HqlTranslator;
import org.hibernate.query.hql.internal.StandardHqlTranslator;
import org.hibernate.query.internal.NamedObjectRepositoryImpl;
import org.hibernate.query.internal.QueryInterpretationCacheDisabledImpl;
import org.hibernate.query.named.NamedObjectRepository;
import org.hibernate.query.spi.QueryEngine;
import org.hibernate.query.spi.QueryInterpretationCache;
import org.hibernate.query.sqm.NodeBuilder;
import org.hibernate.query.sqm.SqmPathSource;
import org.hibernate.query.sqm.function.SqmFunctionRegistry;
import org.hibernate.query.sqm.internal.SqmCriteriaNodeBuilder;
import org.hibernate.query.sqm.sql.SqmTranslatorFactory;
import org.hibernate.query.sqm.sql.StandardSqmTranslatorFactory;
import org.hibernate.stat.internal.StatisticsImpl;
import org.hibernate.stat.spi.StatisticsImplementor;
import org.hibernate.type.BagType;
import org.hibernate.type.CollectionType;
import org.hibernate.type.CompositeType;
import org.hibernate.type.ListType;
import org.hibernate.type.MapType;
import org.hibernate.type.SetType;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.java.spi.UnknownBasicJavaType;
import org.hibernate.type.descriptor.jdbc.JdbcTypeIndicators;
import org.hibernate.type.descriptor.jdbc.ObjectJdbcType;
import org.hibernate.type.spi.TypeConfiguration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static java.util.Collections.singletonList;
/**
* @author Gavin King
*/
public abstract class MockSessionFactory
implements SessionFactoryImplementor, QueryEngine, RuntimeModelCreationContext, MetadataBuildingOptions,
BootstrapContext, MetadataBuildingContext, FunctionContributions, SessionFactoryOptions, JdbcTypeIndicators {
	// static so other things can get at it
	// TODO: make a static instance of this whole object instead!
	static TypeConfiguration typeConfiguration;
	// lazily created persisters, cached by entity name / collection role
	private final Map<String, MockEntityPersister> entityPersistersByName = new HashMap<>();
	private final Map<String, MockCollectionPersister> collectionPersistersByName = new HashMap<>();
	private final StandardServiceRegistryImpl serviceRegistry;
	private final SqmFunctionRegistry functionRegistry;
	private final MappingMetamodelImpl metamodel;
	private final MetadataImplementor bootModel;
	private final MetadataContext metadataContext;
	public MockSessionFactory() {
		// NOTE: the ordering below mirrors Hibernate's own bootstrap sequence
		// (service registry -> metamodel -> boot model -> type configuration)
		// and is order-sensitive.
		serviceRegistry = StandardServiceRegistryImpl.create(
				new BootstrapServiceRegistryBuilder().applyClassLoaderService(new ClassLoaderServiceImpl() {
					@Override
					@SuppressWarnings("unchecked")
					public Class<?> classForName(String className) {
						try {
							return super.classForName(className);
						}
						catch (ClassLoadingException e) {
							// the class isn't on the processor's classpath, but the
							// compiler knows about it: substitute a placeholder class
							if (isClassDefined(className)) {
								return Object[].class;
							}
							else {
								throw e;
							}
						}
					}
				}).build(),
				singletonList(MockJdbcServicesInitiator.INSTANCE),
				emptyList(),
				emptyMap()
		);
		functionRegistry = new SqmFunctionRegistry();
		metamodel = new MockMappingMetamodelImpl();
		// a boot-time metadata model with every collection empty
		bootModel = new MetadataImpl(
				UUID.randomUUID(),
				this,
				emptyMap(),
				emptyList(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				emptyMap(),
				new Database(this, MockJdbcServicesInitiator.jdbcServices.getJdbcEnvironment()),
				this
		);
		metadataContext = new MetadataContext(
				metamodel.getJpaMetamodel(),
				metamodel,
				bootModel,
				JpaStaticMetaModelPopulationSetting.DISABLED,
				JpaMetaModelPopulationSetting.DISABLED,
				this
		);
		typeConfiguration = new TypeConfiguration();
		// scope to the boot model first so the dialect can register functions,
		// then re-scope to the (now fully constructed) session factory
		typeConfiguration.scope((MetadataBuildingContext) this);
		MockJdbcServicesInitiator.genericDialect.initializeFunctionRegistry(this);
		CommonFunctionFactory functionFactory = new CommonFunctionFactory(this);
		functionFactory.listagg(null);
		functionFactory.inverseDistributionOrderedSetAggregates();
		functionFactory.hypotheticalOrderedSetAggregates();
		functionFactory.windowFunctions();
		typeConfiguration.scope((SessionFactoryImplementor) this);
	}
	@Override
	public TypeConfiguration getTypeConfiguration() {
		return typeConfiguration;
	}
	@Override
	public void addObserver(SessionFactoryObserver observer) {
		// no-op: lifecycle observers are not relevant to a validation-only factory
	}
	@Override
	public MetadataBuildingOptions getBuildingOptions() {
		// this mock plays every bootstrap role itself
		return this;
	}
@Override
public PhysicalNamingStrategy getPhysicalNamingStrategy() {
return new PhysicalNamingStrategyStandardImpl();
}
@Override
public ImplicitNamingStrategy getImplicitNamingStrategy() {
return new ImplicitNamingStrategyJpaCompliantImpl();
}
static CollectionType createCollectionType(String role, String name) {
switch (name) {
case "Set":
case "SortedSet":
//might actually be a bag!
//TODO: look for @OrderColumn on the property
return new SetType(role, null);
case "List":
case "SortedList":
return new ListType(role, null);
case "Map":
case "SortedMap":
return new MapType(role, null);
default:
return new BagType(role, null);
}
}
	/**
	 * Lazily create a {@link MockEntityPersister}
	 */
	abstract MockEntityPersister createMockEntityPersister(String entityName);
	/**
	 * Lazily create a {@link MockCollectionPersister}
	 */
	abstract MockCollectionPersister createMockCollectionPersister(String role);
	// Symbol-table lookups supplied by the concrete subclass, which can see
	// the compiler's model of the user's classes:
	abstract boolean isEntityDefined(String entityName);
	abstract String qualifyName(String entityName);
	abstract boolean isAttributeDefined(String entityName, String fieldName);
	abstract boolean isClassDefined(String qualifiedName);
	abstract boolean isFieldDefined(String qualifiedClassName, String fieldName);
	abstract boolean isConstructorDefined(String qualifiedClassName, List<Type> argumentTypes);
	abstract Type propertyType(String typeName, String propertyPath);
	protected abstract boolean isSubtype(String entityName, String subtypeEntityName);
	protected abstract String getSupertype(String entityName);
private EntityPersister createEntityPersister(String entityName) {
MockEntityPersister result = entityPersistersByName.get(entityName);
if (result!=null) {
return result;
}
result = createMockEntityPersister(entityName);
entityPersistersByName.put(entityName, result);
return result;
}
private CollectionPersister createCollectionPersister(String entityName) {
MockCollectionPersister result = collectionPersistersByName.get(entityName);
if (result!=null) {
return result;
}
result = createMockCollectionPersister(entityName);
collectionPersistersByName.put(entityName, result);
return result;
}
List<MockEntityPersister> getMockEntityPersisters() {
return entityPersistersByName.values()
.stream()
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
	@Override
	public Type getIdentifierType(String className)
			throws MappingException {
		// delegates to the (possibly freshly created) mock persister
		return createEntityPersister(className)
				.getIdentifierType();
	}
	@Override
	public String getIdentifierPropertyName(String className)
			throws MappingException {
		return createEntityPersister(className)
				.getIdentifierPropertyName();
	}
	@Override
	public Type getReferencedPropertyType(String className, String propertyName)
			throws MappingException {
		return createEntityPersister(className)
				.getPropertyType(propertyName);
	}
	@Override
	public MetamodelImplementor getMetamodel() {
		return metamodel;
	}
	@Override
	public StandardServiceRegistryImpl getServiceRegistry() {
		return serviceRegistry;
	}
	@Override
	public JdbcServices getJdbcServices() {
		// bypasses the service registry: the mock JDBC services are a fixed singleton
		return MockJdbcServicesInitiator.jdbcServices;
		// return serviceRegistry.getService(JdbcServices.class);
	}
	@Override
	public String getName() {
		return "mock";
	}
	@Override
	public SessionFactoryOptions getSessionFactoryOptions() {
		return this;
	}
	@Override
	public Set<String> getDefinedFilterNames() {
		return emptySet();
	}
	@Override
	public CacheImplementor getCache() {
		// second-level caching is meaningless here
		return new DisabledCaching(this);
	}
	@Override
	public EntityNotFoundDelegate getEntityNotFoundDelegate() {
		return new StandardEntityNotFoundDelegate();
	}
	@Override
	public CustomEntityDirtinessStrategy getCustomEntityDirtinessStrategy() {
		return new DefaultCustomEntityDirtinessStrategy();
	}
	@Override
	public CurrentTenantIdentifierResolver getCurrentTenantIdentifierResolver() {
		// no multi-tenancy
		return null;
	}
	@Override
	public FastSessionServices getFastSessionServices() {
		// sessions are never opened from this factory
		throw new UnsupportedOperationException();
	}
	@Override
	public void close() {}
	@Override
	public RootGraphImplementor<?> findEntityGraphByName(String s) {
		throw new UnsupportedOperationException();
	}
static Class<?> toPrimitiveClass(Class<?> type) {
switch (type.getName()) {
case "java.lang.Boolean":
return boolean.class;
case "java.lang.Character":
return char.class;
case "java.lang.Integer":
return int.class;
case "java.lang.Short":
return short.class;
case "java.lang.Byte":
return byte.class;
case "java.lang.Long":
return long.class;
case "java.lang.Float":
return float.class;
case "java.lang.Double":
return double.class;
default:
return Object.class;
}
}
	@Override
	public NativeQueryInterpreter getNativeQueryInterpreter() {
		return new NativeQueryInterpreterStandardImpl();
	}
	@Override
	public QueryInterpretationCache getInterpretationCache() {
		// caching interpretations is pointless for one-shot validation
		return new QueryInterpretationCacheDisabledImpl(this::getStatistics);
	}
	@Override
	public StatisticsImplementor getStatistics() {
		return new StatisticsImpl(this);
	}
	@Override
	public SqmFunctionRegistry getSqmFunctionRegistry() {
		return functionRegistry;
	}
	@Override
	public NodeBuilder getCriteriaBuilder() {
		return new SqmCriteriaNodeBuilder(
				"",
				"",
				this,
				false,
				ValueHandlingMode.INLINE,
				() -> MockSessionFactory.this
		);
	}
	@Override
	public void validateNamedQueries() {
		// no named queries to validate
	}
	@Override
	public NamedObjectRepository getNamedObjectRepository() {
		// empty repository: no named queries/result-set mappings are registered
		return new NamedObjectRepositoryImpl(new HashMap<>(), new HashMap<>(), new HashMap<>(), new HashMap<>());
	}
	@Override
	public HqlTranslator getHqlTranslator() {
		return new StandardHqlTranslator(MockSessionFactory.this, () -> false);
	}
	@Override
	public SqmTranslatorFactory getSqmTranslatorFactory() {
		return new StandardSqmTranslatorFactory();
	}
	@Override
	public QueryEngine getQueryEngine() {
		// the mock factory is its own query engine
		return this;
	}
	@Override
	public JpaMetamodelImplementor getJpaMetamodel() {
		return metamodel.getJpaMetamodel();
	}
	@Override
	public MappingMetamodelImplementor getMappingMetamodel() {
		return metamodel;
	}
	@Override
	public RuntimeMetamodelsImplementor getRuntimeMetamodels() {
		// built fresh on each call from the single mapping metamodel
		RuntimeMetamodelsImpl runtimeMetamodels = new RuntimeMetamodelsImpl();
		runtimeMetamodels.setJpaMetamodel( metamodel.getJpaMetamodel() );
		runtimeMetamodels.setMappingMetamodel( metamodel );
		return runtimeMetamodels;
	}
	@Override
	public boolean isClosed() {
		return false;
	}
	private static final SessionFactoryObserver[] NO_OBSERVERS = new SessionFactoryObserver[0];
	private static final EntityNameResolver[] NO_RESOLVERS = new EntityNameResolver[0];
	static MutableJpaCompliance jpaCompliance = new MutableJpaComplianceImpl(emptyMap());
	@Override
	public MutableJpaCompliance getJpaCompliance() {
		return jpaCompliance;
	}
	@Override
	public String getSessionFactoryName() {
		return "mock";
	}
	@Override
	public String getUuid() {
		return "mock";
	}
	@Override
	public SessionFactoryObserver[] getSessionFactoryObservers() {
		return NO_OBSERVERS;
	}
	@Override
	public EntityNameResolver[] getEntityNameResolvers() {
		return NO_RESOLVERS;
	}
	@Override
	public BatchFetchStyle getBatchFetchStyle() {
		return BatchFetchStyle.LEGACY;
	}
	@Override
	public boolean isDelayBatchFetchLoaderCreationsEnabled() {
		return false;
	}
	@Override
	public Integer getMaximumFetchDepth() {
		// null means "no limit" for this option
		return null;
	}
	@Override
	public void setCheckNullability(boolean enabled) {}
	// Mapping defaults for the mock model: every implicit naming/option answers
	// "nothing configured" (null names, no quoting, no auto-import, eager fetch).
	private static class MockMappingDefaults implements MappingDefaults {
		@Override
		public String getImplicitSchemaName() {
			return null;
		}
		@Override
		public String getImplicitCatalogName() {
			return null;
		}
		@Override
		public boolean shouldImplicitlyQuoteIdentifiers() {
			return false;
		}
		@Override
		public String getImplicitIdColumnName() {
			return null;
		}
		@Override
		public String getImplicitTenantIdColumnName() {
			return null;
		}
		@Override
		public String getImplicitDiscriminatorColumnName() {
			return null;
		}
		@Override
		public String getImplicitPackageName() {
			return null;
		}
		@Override
		public boolean isAutoImportEnabled() {
			return false;
		}
		@Override
		public String getImplicitCascadeStyleName() {
			return null;
		}
		@Override
		public String getImplicitPropertyAccessorName() {
			return null;
		}
		@Override
		public boolean areEntitiesImplicitlyLazy() {
			return false;
		}
		@Override
		public boolean areCollectionsImplicitlyLazy() {
			return false;
		}
		@Override
		public AccessType getImplicitCacheAccessType() {
			return null;
		}
		@Override
		public CollectionClassification getImplicitListClassification() {
			return null;
		}
	}
	@Override
	public Dialect getDialect() {
		// Generic dialect supplied by the mock JDBC services.
		return MockJdbcServicesInitiator.genericDialect;
	}
	// Preferred JDBC type codes consulted when basic types are resolved.
	@Override
	public int getPreferredSqlTypeCodeForBoolean() {
		return SqlTypes.BOOLEAN;
	}
	@Override
	public int getPreferredSqlTypeCodeForDuration() {
		return SqlTypes.NUMERIC;
	}
	@Override
	public int getPreferredSqlTypeCodeForUuid() {
		return SqlTypes.UUID;
	}
	@Override
	public int getPreferredSqlTypeCodeForInstant() {
		return SqlTypes.TIMESTAMP_WITH_TIMEZONE;
	}
	@Override
	public int getPreferredSqlTypeCodeForArray() {
		return SqlTypes.ARRAY;
	}
	// Mapping metamodel that fabricates an entity/collection persister on every
	// lookup instead of consulting a prebuilt registry.
	private class MockMappingMetamodelImpl extends MappingMetamodelImpl {
		public MockMappingMetamodelImpl() {
			super(typeConfiguration, serviceRegistry);
		}
		@Override
		public EntityPersister getEntityDescriptor(String entityName) {
			return createEntityPersister(entityName);
		}
		@Override
		public EntityPersister entityPersister(String entityName)
				throws MappingException {
			return createEntityPersister(entityName);
		}
		@Override
		public EntityPersister locateEntityPersister(String entityName)
				throws MappingException {
			return createEntityPersister(entityName);
		}
		@Override
		public CollectionPersister getCollectionDescriptor(String role) {
			return createCollectionPersister(role);
		}
		@Override
		public CollectionPersister findCollectionDescriptor(String role) {
			return createCollectionPersister(role);
		}
		@Override
		public CollectionPersister collectionPersister(String role) {
			return createCollectionPersister(role);
		}
		@Override
		public JpaMetamodelImplementor getJpaMetamodel() {
			return new MockJpaMetamodelImpl();
		}
		@Override
		public EntityPersister findEntityDescriptor(String entityName) {
			return createEntityPersister(entityName);
		}
	}
	@Override
	public SessionFactoryImplementor getSessionFactory() {
		return MockSessionFactory.this;
	}
	@Override
	public BootstrapContext getBootstrapContext() {
		// The mock also plays the role of the bootstrap context.
		return this;
	}
	@Override
	public MetadataImplementor getBootModel() {
		return bootModel;
	}
	@Override
	public MappingMetamodelImplementor getDomainModel() {
		return metamodel;
	}
	@Override
	public SqmFunctionRegistry getFunctionRegistry() {
		return functionRegistry;
	}
	@Override
	public Map<String, Object> getSettings() {
		// No configuration settings are applied to the mock.
		return emptyMap();
	}
	@Override
	public SqlStringGenerationContext getSqlStringGenerationContext() {
		// SQL string generation is never reached during validation.
		throw new UnsupportedOperationException();
	}
	@Override
	public IdentifierGeneratorFactory getIdentifierGeneratorFactory() {
		return new StandardIdentifierGeneratorFactory(serviceRegistry, true);
	}
	@Override
	public MappingDefaults getMappingDefaults() {
		return new MockMappingDefaults();
	}
	@Override
	public TimeZoneStorageStrategy getDefaultTimeZoneStorageStrategy() {
		return TimeZoneStorageStrategy.NATIVE;
	}
private class MockJpaMetamodelImpl extends JpaMetamodelImpl {
public MockJpaMetamodelImpl() {
super(typeConfiguration, metamodel, serviceRegistry);
}
@Override
public <X> EntityDomainType<X> entity(String entityName) {
if ( isEntityDefined(entityName) ) {
return new MockEntityDomainType<>(entityName);
}
else {
return null;
}
}
@Override
public String qualifyImportableName(String queryName) {
if (isClassDefined(queryName)) {
return queryName;
}
else if (isEntityDefined(queryName)) {
return qualifyName(queryName);
}
return null;
}
@Override
public <X> ManagedDomainType<X> findManagedType(Class<X> cls) {
throw new UnsupportedOperationException();
}
@Override
public <X> EntityDomainType<X> findEntityType(Class<X> cls) {
if ( isEntityDefined( cls.getName() ) ) {
return new MockEntityDomainType<>( cls.getName() );
}
else {
return null;
}
}
@Override
public <X> ManagedDomainType<X> managedType(Class<X> cls) {
throw new UnsupportedOperationException();
}
@Override
public <X> EntityDomainType<X> entity(Class<X> cls) {
throw new UnsupportedOperationException();
}
@Override
public JpaCompliance getJpaCompliance() {
return jpaCompliance;
}
}
class MockMappedDomainType<X> extends MappedSuperclassTypeImpl<X>{
public MockMappedDomainType(String typeName) {
super(typeName, false, true, false, null, null, metamodel.getJpaMetamodel());
}
@Override
public PersistentAttribute<X,?> findDeclaredAttribute(String name) {
String typeName = getTypeName();
return isFieldDefined(typeName, name)
? createAttribute(name, typeName, propertyType(typeName, name), this)
: null;
}
}
class MockEntityDomainType<X> extends EntityTypeImpl<X> {
public MockEntityDomainType(String entityName) {
super(entityName, entityName, false, true, false, null, null,
metamodel.getJpaMetamodel());
}
@Override
public SqmPathSource<?> findSubPathSource(String name, JpaMetamodelImplementor metamodel) {
SqmPathSource<?> source = super.findSubPathSource(name, metamodel);
if ( source != null ) {
return source;
}
String supertype = MockSessionFactory.this.getSupertype(getHibernateEntityName());
PersistentAttribute<? super Object, ?> superattribute
= new MockMappedDomainType<>(supertype).findAttribute(name);
if (superattribute != null) {
return (SqmPathSource<?>) superattribute;
}
for (Map.Entry<String, MockEntityPersister> entry : entityPersistersByName.entrySet()) {
if (!entry.getValue().getEntityName().equals(getHibernateEntityName())
&& isSubtype(entry.getValue().getEntityName(), getHibernateEntityName())) {
PersistentAttribute<? super Object, ?> subattribute
= new MockEntityDomainType<>(entry.getValue().getEntityName()).findAttribute(name);
if (subattribute != null) {
return (SqmPathSource<?>) subattribute;
}
}
}
return null;
}
@Override
public PersistentAttribute<? super X, ?> findAttribute(String name) {
PersistentAttribute<? super X, ?> attribute = super.findAttribute(name);
if (attribute != null) {
return attribute;
}
String supertype = MockSessionFactory.this.getSupertype(getHibernateEntityName());
PersistentAttribute<? super Object, ?> superattribute
= new MockMappedDomainType<>(supertype).findAttribute(name);
if (superattribute != null) {
return superattribute;
}
return null;
}
@Override
public PersistentAttribute<X,?> findDeclaredAttribute(String name) {
String entityName = getHibernateEntityName();
return isAttributeDefined(entityName, name)
? createAttribute(name, entityName, getReferencedPropertyType(entityName, name), this)
: null;
}
}
private AbstractAttribute createAttribute(String name, String entityName, Type type, ManagedDomainType<?> owner) {
if (type==null) {
throw new UnsupportedOperationException(entityName + "." + name);
}
else if ( type.isCollectionType() ) {
CollectionType collectionType = (CollectionType) type;
return createPluralAttribute(collectionType, entityName, name, owner);
}
else if ( type.isEntityType() ) {
return new SingularAttributeImpl<>(
owner,
name,
AttributeClassification.MANY_TO_ONE,
new MockEntityDomainType<>(type.getName()),
null,
null,
false,
false,
true,
false,
metadataContext
);
}
else if ( type.isComponentType() ) {
CompositeType compositeType = (CompositeType) type;
return new SingularAttributeImpl<>(
owner,
name,
AttributeClassification.EMBEDDED,
createEmbeddableDomainType(entityName, compositeType, owner),
null,
null,
false,
false,
true,
false,
metadataContext
);
}
else {
return new SingularAttributeImpl<>(
owner,
name,
AttributeClassification.BASIC,
(DomainType<?>) type,
type instanceof JdbcMapping
? ((JdbcMapping) type).getJavaTypeDescriptor()
: null,
null,
false,
false,
true,
false,
metadataContext
);
}
}
	// Domain type of the collection's elements.
	private DomainType<?> getElementDomainType(String entityName, CollectionType collectionType, ManagedDomainType<?> owner) {
		Type elementType = collectionType.getElementType(MockSessionFactory.this);
		return getDomainType(entityName, collectionType, owner, elementType);
	}
	// Domain type of a map's keys (the collection's index type).
	private DomainType<?> getMapKeyDomainType(String entityName, CollectionType collectionType, ManagedDomainType<?> owner) {
		Type keyType = getMappingMetamodel().getCollectionDescriptor( collectionType.getRole() ).getIndexType();
		return getDomainType(entityName, collectionType, owner, keyType);
	}
private DomainType<?> getDomainType(String entityName, CollectionType collectionType, ManagedDomainType<?> owner, Type elementType) {
if ( elementType.isEntityType() ) {
String associatedEntityName = collectionType.getAssociatedEntityName(MockSessionFactory.this);
return new MockEntityDomainType<>(associatedEntityName);
}
else if ( elementType.isComponentType() ) {
CompositeType compositeType = (CompositeType) elementType;
return createEmbeddableDomainType(entityName, compositeType, owner);
}
else if ( elementType instanceof DomainType ) {
return (DomainType<?>) elementType;
}
else {
return new BasicTypeImpl<>(new UnknownBasicJavaType<>(Object.class), ObjectJdbcType.INSTANCE);
}
}
private AbstractPluralAttribute createPluralAttribute(
CollectionType collectionType,
String entityName,
String name,
ManagedDomainType<?> owner) {
Property property = new Property();
property.setName(name);
JavaType<Object> collectionJavaType =
typeConfiguration.getJavaTypeRegistry()
.getDescriptor(collectionType.getReturnedClass());
DomainType<?> elementDomainType = getElementDomainType(entityName, collectionType, owner);
CollectionClassification classification = collectionType.getCollectionClassification();
switch (classification) {
case LIST:
return new ListAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
typeConfiguration.getBasicTypeRegistry()
.getRegisteredType(Integer.class),
owner,
property,
null
),
metadataContext
);
case BAG:
case ID_BAG:
return new BagAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
null,
owner,
property,
null
),
metadataContext
);
case SET:
case SORTED_SET:
case ORDERED_SET:
return new SetAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
null,
owner,
property,
null
),
metadataContext
);
case MAP:
case SORTED_MAP:
case ORDERED_MAP:
DomainType<?> keyDomainType = getMapKeyDomainType(entityName, collectionType, owner);
return new MapAttributeImpl(
new PluralAttributeBuilder<>(
collectionJavaType,
true,
AttributeClassification.MANY_TO_MANY,
classification,
elementDomainType,
keyDomainType,
owner,
property,
null
),
metadataContext
);
default:
return null;
}
}
	// Anonymous embeddable type that resolves its component sub-attributes on demand.
	private EmbeddableTypeImpl<Object> createEmbeddableDomainType(String entityName, CompositeType compositeType, ManagedDomainType<?> owner) {
		return new EmbeddableTypeImpl<Object>(new UnknownBasicJavaType<>(Object.class), true, metamodel.getJpaMetamodel()) {
			@Override
			public PersistentAttribute<Object, Object> findAttribute(String name) {
				int i = compositeType.getPropertyIndex(name);
				Type subtype = compositeType.getSubtypes()[i];
				return createAttribute(
						name,
						entityName, // TODO: wrong — should be the embeddable's own type name, not the owning entity's
						subtype,
						owner
				);
			}
		};
	}
}
|
apache/tajo | 37,065 | tajo-plan/src/main/java/org/apache/tajo/plan/ExprAnnotator.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.plan;
import com.google.common.collect.Sets;
import org.apache.commons.collections.set.UnmodifiableSet;
import org.apache.tajo.OverridableConf;
import org.apache.tajo.algebra.*;
import org.apache.tajo.catalog.*;
import org.apache.tajo.catalog.CatalogUtil.Direction;
import org.apache.tajo.common.TajoDataTypes;
import org.apache.tajo.datum.*;
import org.apache.tajo.exception.*;
import org.apache.tajo.plan.algebra.BaseAlgebraVisitor;
import org.apache.tajo.plan.expr.*;
import org.apache.tajo.plan.logical.NodeType;
import org.apache.tajo.plan.logical.TableSubQueryNode;
import org.apache.tajo.plan.nameresolver.NameResolver;
import org.apache.tajo.plan.nameresolver.NameResolvingMode;
import org.apache.tajo.type.TypeFactory;
import org.apache.tajo.util.Pair;
import org.apache.tajo.util.TUtil;
import org.apache.tajo.util.datetime.DateTimeUtil;
import org.apache.tajo.util.datetime.TimeMeta;
import java.util.Arrays;
import java.util.Set;
import java.util.Stack;
import java.util.TimeZone;
import static org.apache.tajo.algebra.WindowSpec.WindowFrameEndBoundType;
import static org.apache.tajo.algebra.WindowSpec.WindowFrameStartBoundType;
import static org.apache.tajo.catalog.TypeConverter.convert;
import static org.apache.tajo.catalog.proto.CatalogProtos.FunctionType;
import static org.apache.tajo.common.TajoDataTypes.DataType;
import static org.apache.tajo.common.TajoDataTypes.Type.NULL_TYPE;
import static org.apache.tajo.function.FunctionUtil.buildSimpleFunctionSignature;
import static org.apache.tajo.plan.logical.WindowSpec.*;
import static org.apache.tajo.plan.verifier.SyntaxErrorUtil.makeSyntaxError;
import static org.apache.tajo.type.Type.Text;
/**
* <code>ExprAnnotator</code> makes an annotated expression called <code>EvalNode</code> from an
* {@link org.apache.tajo.algebra.Expr}. It visits descendants recursively from a given expression, and finally
* it returns an EvalNode.
*/
public class ExprAnnotator extends BaseAlgebraVisitor<ExprAnnotator.Context, EvalNode> {
  // Catalog used to look up function signatures during annotation.
  private CatalogService catalog;
  public ExprAnnotator(CatalogService catalog) {
    this.catalog = catalog;
  }
static class Context {
OverridableConf queryContext;
TimeZone timeZone;
LogicalPlan plan;
LogicalPlan.QueryBlock currentBlock;
NameResolvingMode columnRsvLevel;
boolean includeSelfDescTable;
public Context(LogicalPlanner.PlanContext planContext, NameResolvingMode colRsvLevel, boolean includeSeflDescTable) {
this.queryContext = planContext.queryContext;
this.timeZone = planContext.timeZone;
this.plan = planContext.plan;
this.currentBlock = planContext.queryBlock;
this.columnRsvLevel = colRsvLevel;
this.includeSelfDescTable = includeSeflDescTable;
}
}
public EvalNode createEvalNode(LogicalPlanner.PlanContext planContext, Expr expr,
NameResolvingMode colRsvLevel) throws TajoException {
return createEvalNode(planContext, expr, colRsvLevel, false);
}
public EvalNode createEvalNode(LogicalPlanner.PlanContext planContext, Expr expr,
NameResolvingMode colRsvLevel, boolean includeSeflDescTable) throws TajoException {
Context context = new Context(planContext, colRsvLevel, includeSeflDescTable);
return planContext.evalOptimizer.optimize(planContext, visit(context, new Stack<>(), expr));
}
public static void assertEval(boolean condition, String message) throws TajoException {
if (!condition) {
throw makeSyntaxError(message);
}
}
  /**
   * It checks both terms in binary expression. If one of both needs type conversion, it inserts a cast expression.
   *
   * @param ctx annotation context (supplies the time zone used when casting)
   * @param lhs left hand side term
   * @param rhs right hand side term
   * @return a pair including left/right hand side terms
   */
  private static Pair<EvalNode, EvalNode> convertTypesIfNecessary(Context ctx, EvalNode lhs, EvalNode rhs) {
    TajoDataTypes.Type lhsType = lhs.getValueType().kind();
    TajoDataTypes.Type rhsType = rhs.getValueType().kind();
    // If one of both is NULL, it just returns the original types without casting.
    if (lhsType == NULL_TYPE || rhsType == NULL_TYPE) {
      return new Pair<>(lhs, rhs);
    }
    // Common type that the operands should be converted to; null if no cast is needed.
    TajoDataTypes.Type toBeCasted = TUtil.getFromNestedMap(CatalogUtil.OPERATION_CASTING_MAP, lhsType, rhsType);
    if (toBeCasted != null) { // if not null, one of either should be converted to another type.
      // Overwrite lhs, rhs, or both with cast expression.
      Direction direction = CatalogUtil.getCastingDirection(lhsType, rhsType);
      if (lhsType != toBeCasted && (direction == Direction.BOTH || direction == Direction.LHS)) {
        lhs = convertType(ctx, lhs, TypeFactory.create(toBeCasted));
      }
      if (rhsType != toBeCasted && (direction == Direction.BOTH || direction == Direction.RHS)) {
        rhs = convertType(ctx, rhs, TypeFactory.create(toBeCasted));
      }
    }
    return new Pair<>(lhs, rhs);
  }
/**
* Insert a type conversion expression to a given expression.
* If the type of expression and <code>toType</code> is already the same, it just returns the original expression.
*
* @param evalNode an expression
* @param toType target type
* @return type converted expression.
*/
private static EvalNode convertType(Context ctx, EvalNode evalNode, org.apache.tajo.type.Type toType) {
// if original and toType is the same, we don't need type conversion.
if (evalNode.getValueType().equals(toType)) {
return evalNode;
}
// the conversion to null is not allowed.
if (evalNode.getValueType().isNull() || toType.isNull()) {
return evalNode;
}
if (evalNode.getType() == EvalType.BETWEEN) {
BetweenPredicateEval between = (BetweenPredicateEval) evalNode;
between.setPredicand(convertType(ctx, between.getPredicand(), toType));
between.setBegin(convertType(ctx, between.getBegin(), toType));
between.setEnd(convertType(ctx, between.getEnd(), toType));
return between;
} else if (evalNode.getType() == EvalType.CASE) {
CaseWhenEval caseWhenEval = (CaseWhenEval) evalNode;
for (CaseWhenEval.IfThenEval ifThen : caseWhenEval.getIfThenEvals()) {
ifThen.setResult(convertType(ctx, ifThen.getResult(), toType));
}
if (caseWhenEval.hasElse()) {
caseWhenEval.setElseResult(convertType(ctx, caseWhenEval.getElse(), toType));
}
return caseWhenEval;
} else if (evalNode.getType() == EvalType.ROW_CONSTANT) {
RowConstantEval original = (RowConstantEval) evalNode;
Datum[] datums = original.getValues();
Datum[] convertedDatum = new Datum[datums.length];
for (int i = 0; i < datums.length; i++) {
convertedDatum[i] = DatumFactory.cast(datums[i], toType, ctx.timeZone);
}
RowConstantEval convertedRowConstant = new RowConstantEval(convertedDatum);
return convertedRowConstant;
} else if (evalNode.getType() == EvalType.CONST) {
ConstEval original = (ConstEval) evalNode;
ConstEval newConst = new ConstEval(DatumFactory.cast(original.getValue(), toType, ctx.timeZone));
return newConst;
} else {
return new CastEval(ctx.queryContext, evalNode, toType);
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Logical Operator Section
///////////////////////////////////////////////////////////////////////////////////////////////////////////
  @Override
  public EvalNode visitAnd(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    // Annotate both operands, then combine with logical AND.
    stack.push(expr);
    EvalNode left = visit(ctx, stack, expr.getLeft());
    EvalNode right = visit(ctx, stack, expr.getRight());
    stack.pop();
    return new BinaryEval(EvalType.AND, left, right);
  }
  @Override
  public EvalNode visitOr(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    // Annotate both operands, then combine with logical OR.
    stack.push(expr);
    EvalNode left = visit(ctx, stack, expr.getLeft());
    EvalNode right = visit(ctx, stack, expr.getRight());
    stack.pop();
    return new BinaryEval(EvalType.OR, left, right);
  }
  @Override
  public EvalNode visitNot(Context ctx, Stack<Expr> stack, NotExpr expr) throws TajoException {
    // Annotate the operand, then negate it.
    stack.push(expr);
    EvalNode child = visit(ctx, stack, expr.getChild());
    stack.pop();
    return new NotEval(child);
  }
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Comparison Predicates Section
///////////////////////////////////////////////////////////////////////////////////////////////////////////
  // All comparison operators share the same logic: visitCommonComparison
  // annotates both sides and inserts implicit type casts between them.
  @Override
  public EvalNode visitEquals(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    return visitCommonComparison(ctx, stack, expr);
  }
  @Override
  public EvalNode visitNotEquals(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    return visitCommonComparison(ctx, stack, expr);
  }
  @Override
  public EvalNode visitLessThan(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    return visitCommonComparison(ctx, stack, expr);
  }
  @Override
  public EvalNode visitLessThanOrEquals(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    return visitCommonComparison(ctx, stack, expr);
  }
  @Override
  public EvalNode visitGreaterThan(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    return visitCommonComparison(ctx, stack, expr);
  }
  @Override
  public EvalNode visitGreaterThanOrEquals(Context ctx, Stack<Expr> stack, BinaryOperator expr)
      throws TajoException {
    return visitCommonComparison(ctx, stack, expr);
  }
public EvalNode visitCommonComparison(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
stack.push(expr);
EvalNode left = visit(ctx, stack, expr.getLeft());
EvalNode right = visit(ctx, stack, expr.getRight());
stack.pop();
EvalType evalType;
switch (expr.getType()) {
case Equals:
evalType = EvalType.EQUAL;
break;
case NotEquals:
evalType = EvalType.NOT_EQUAL;
break;
case LessThan:
evalType = EvalType.LTH;
break;
case LessThanOrEquals:
evalType = EvalType.LEQ;
break;
case GreaterThan:
evalType = EvalType.GTH;
break;
case GreaterThanOrEquals:
evalType = EvalType.GEQ;
break;
default:
throw new IllegalStateException("Wrong Expr Type: " + expr.getType());
}
return createBinaryNode(ctx, evalType, left, right);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Other Predicates Section
///////////////////////////////////////////////////////////////////////////////////////////////////////////
  @Override
  public EvalNode visitBetween(Context ctx, Stack<Expr> stack, BetweenPredicate between) throws TajoException {
    // Annotate the predicand and both bounds.
    stack.push(between);
    EvalNode predicand = visit(ctx, stack, between.predicand());
    EvalNode begin = visit(ctx, stack, between.begin());
    EvalNode end = visit(ctx, stack, between.end());
    stack.pop();
    // implicit type conversion: all three terms are widened to a common type.
    DataType widestType = CatalogUtil.getWidestType(
        convert(predicand.getValueType()).getDataType(),
        convert(begin.getValueType()).getDataType(),
        convert(end.getValueType()).getDataType());
    BetweenPredicateEval betweenEval = new BetweenPredicateEval(
        between.isNot(),
        between.isSymmetric(),
        predicand, begin, end);
    betweenEval = (BetweenPredicateEval) convertType(ctx, betweenEval, TypeConverter.convert(widestType));
    return betweenEval;
  }
  @Override
  public EvalNode visitCaseWhen(Context ctx, Stack<Expr> stack, CaseWhenPredicate caseWhen) throws TajoException {
    CaseWhenEval caseWhenEval = new CaseWhenEval();
    EvalNode condition;
    EvalNode result;
    // Annotate each WHEN condition / THEN result pair.
    for (CaseWhenPredicate.WhenExpr when : caseWhen.getWhens()) {
      condition = visit(ctx, stack, when.getCondition());
      result = visit(ctx, stack, when.getResult());
      caseWhenEval.addIfCond(condition, result);
    }
    if (caseWhen.hasElseResult()) {
      caseWhenEval.setElseResult(visit(ctx, stack, caseWhen.getElseResult()));
    }
    // Getting the widest type from all if-then expressions and else expression.
    DataType widestType = convert(caseWhenEval.getIfThenEvals().get(0).getResult().getValueType()).getDataType();
    for (int i = 1; i < caseWhenEval.getIfThenEvals().size(); i++) {
      widestType = CatalogUtil.getWidestType(
          convert(caseWhenEval.getIfThenEvals().get(i).getResult().getValueType()).getDataType(),
          widestType);
    }
    if (caseWhen.hasElseResult()) {
      widestType = CatalogUtil.getWidestType(
          widestType, convert(caseWhenEval.getElse().getValueType()).getDataType());
    }
    assertEval(widestType != null, "Invalid Type Conversion for CaseWhen");
    // implicit type conversion: every result branch is converted to the widest type.
    caseWhenEval = (CaseWhenEval) convertType(ctx, caseWhenEval, TypeConverter.convert(widestType));
    return caseWhenEval;
  }
  @Override
  public EvalNode visitIsNullPredicate(Context ctx, Stack<Expr> stack, IsNullPredicate expr) throws TajoException {
    // IS [NOT] NULL over the annotated predicand.
    stack.push(expr);
    EvalNode child = visit(ctx, stack, expr.getPredicand());
    stack.pop();
    return new IsNullEval(expr.isNot(), child);
  }
  @Override
  public EvalNode visitInPredicate(Context ctx, Stack<Expr> stack, InPredicate expr) throws TajoException {
    stack.push(expr);
    EvalNode lhs = visit(ctx, stack, expr.getLeft());
    // The IN value annotates to a ValueSetEval (literal list or subquery).
    ValueSetEval valueSetEval = (ValueSetEval) visit(ctx, stack, expr.getInValue());
    stack.pop();
    // Make the left side and the value set comparable via implicit casts.
    Pair<EvalNode, EvalNode> pair = convertTypesIfNecessary(ctx, lhs, valueSetEval);
    return new InEval(pair.getFirst(), (ValueSetEval) pair.getSecond(), expr.isNot());
  }
@Override
public EvalNode visitValueListExpr(Context ctx, Stack<Expr> stack, ValueListExpr expr) throws TajoException {
Datum[] values = new Datum[expr.getValues().length];
EvalNode [] evalNodes = new EvalNode[expr.getValues().length];
for (int i = 0; i < expr.getValues().length; i++) {
evalNodes[i] = visit(ctx, stack, expr.getValues()[i]);
if (!EvalTreeUtil.checkIfCanBeConstant(evalNodes[i])) {
throw makeSyntaxError("Non constant values cannot be included in IN PREDICATE.");
}
values[i] = EvalTreeUtil.evaluateImmediately(null, evalNodes[i]);
}
return new RowConstantEval(values);
}
  @Override
  public EvalNode visitSimpleTableSubquery(Context ctx, Stack<Expr> stack, SimpleTableSubquery expr)
      throws TajoException {
    if (stack.peek().getType() == OpType.InPredicate) {
      // In the case of in-subquery, stop visiting because the subquery expr is not expression.
      return new SubqueryEval((TableSubQueryNode) ctx.currentBlock.getNodeFromExpr(expr));
    } else {
      return super.visitSimpleTableSubquery(ctx, stack, expr);
    }
  }
  // EXISTS is not yet supported by the annotator.
  public EvalNode visitExistsPredicate(Context ctx, Stack<Expr> stack, ExistsPredicate expr) throws TajoException {
    throw new NotImplementedException("EXISTS clause");
  }
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// String Operator or Pattern Matching Predicates Section
///////////////////////////////////////////////////////////////////////////////////////////////////////////
  // LIKE, SIMILAR TO, and REGEXP predicates all share the same annotation logic.
  @Override
  public EvalNode visitLikePredicate(Context ctx, Stack<Expr> stack, PatternMatchPredicate expr)
      throws TajoException {
    return visitPatternMatchPredicate(ctx, stack, expr);
  }
  @Override
  public EvalNode visitSimilarToPredicate(Context ctx, Stack<Expr> stack, PatternMatchPredicate expr)
      throws TajoException {
    return visitPatternMatchPredicate(ctx, stack, expr);
  }
  @Override
  public EvalNode visitRegexpPredicate(Context ctx, Stack<Expr> stack, PatternMatchPredicate expr)
      throws TajoException {
    return visitPatternMatchPredicate(ctx, stack, expr);
  }
@Override
public EvalNode visitConcatenate(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
stack.push(expr);
EvalNode lhs = visit(ctx, stack, expr.getLeft());
EvalNode rhs = visit(ctx, stack, expr.getRight());
stack.pop();
if (lhs.getValueType().kind() != TajoDataTypes.Type.TEXT) {
lhs = convertType(ctx, lhs, Text);
}
if (rhs.getValueType().kind() != TajoDataTypes.Type.TEXT) {
rhs = convertType(ctx, rhs, Text);
}
return new BinaryEval(EvalType.CONCATENATE, lhs, rhs);
}
  // Builds the concrete pattern-matching eval (LIKE / SIMILAR TO / REGEXP)
  // for the annotated predicand and constant pattern.
  private EvalNode visitPatternMatchPredicate(Context ctx, Stack<Expr> stack, PatternMatchPredicate expr)
      throws TajoException {
    EvalNode field = visit(ctx, stack, expr.getPredicand());
    ConstEval pattern = (ConstEval) visit(ctx, stack, expr.getPattern());
    // A pattern is a const value in pattern matching predicates.
    // In a binary expression, the result is always null if a const value in left or right side is null.
    if (pattern.getValue() instanceof NullDatum) {
      return new ConstEval(NullDatum.get());
    } else {
      if (expr.getType() == OpType.LikePredicate) {
        return new LikePredicateEval(expr.isNot(), field, pattern, expr.isCaseInsensitive());
      } else if (expr.getType() == OpType.SimilarToPredicate) {
        return new SimilarToPredicateEval(expr.isNot(), field, pattern);
      } else {
        // Remaining case: REGEXP-style predicates.
        return new RegexPredicateEval(expr.isNot(), field, pattern, expr.isCaseInsensitive());
      }
    }
  }
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Arithmetic Operators
///////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Builds a binary eval node, inserting implicit casts so both operands agree.
  private static BinaryEval createBinaryNode(Context ctx, EvalType type, EvalNode lhs, EvalNode rhs) {
    Pair<EvalNode, EvalNode> pair = convertTypesIfNecessary(ctx, lhs, rhs); // implicit type conversion if necessary
    return new BinaryEval(type, pair.getFirst(), pair.getSecond());
  }
  // Arithmetic operators: annotate both operands, then build a binary node
  // (createBinaryNode inserts implicit casts so the operand types agree).
  @Override
  public EvalNode visitPlus(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    stack.push(expr);
    EvalNode left = visit(ctx, stack, expr.getLeft());
    EvalNode right = visit(ctx, stack, expr.getRight());
    stack.pop();
    return createBinaryNode(ctx, EvalType.PLUS, left, right);
  }
  @Override
  public EvalNode visitMinus(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    stack.push(expr);
    EvalNode left = visit(ctx, stack, expr.getLeft());
    EvalNode right = visit(ctx, stack, expr.getRight());
    stack.pop();
    return createBinaryNode(ctx, EvalType.MINUS, left, right);
  }
  @Override
  public EvalNode visitMultiply(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    stack.push(expr);
    EvalNode left = visit(ctx, stack, expr.getLeft());
    EvalNode right = visit(ctx, stack, expr.getRight());
    stack.pop();
    return createBinaryNode(ctx, EvalType.MULTIPLY, left, right);
  }
  @Override
  public EvalNode visitDivide(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    stack.push(expr);
    EvalNode left = visit(ctx, stack, expr.getLeft());
    EvalNode right = visit(ctx, stack, expr.getRight());
    stack.pop();
    return createBinaryNode(ctx, EvalType.DIVIDE, left, right);
  }
  @Override
  public EvalNode visitModular(Context ctx, Stack<Expr> stack, BinaryOperator expr) throws TajoException {
    stack.push(expr);
    EvalNode left = visit(ctx, stack, expr.getLeft());
    EvalNode right = visit(ctx, stack, expr.getRight());
    stack.pop();
    return createBinaryNode(ctx, EvalType.MODULAR, left, right);
  }
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Other Expressions
///////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
 * Annotates a signed (unary +/-) expression.
 * A unary plus is a no-op, so the child eval node is returned unchanged;
 * only a negative sign wraps the child in a {@link SignedEval}.
 */
@Override
public EvalNode visitSign(Context ctx, Stack<Expr> stack, SignedExpr expr) throws TajoException {
  stack.push(expr);
  final EvalNode child = visit(ctx, stack, expr.getChild());
  stack.pop();

  return expr.isNegative() ? new SignedEval(true, child) : child;
}
/**
 * Resolves a column reference into a {@link FieldEval}.
 * Every currently-supported resolving level is delegated to
 * {@code NameResolver.resolve()}; the default arm guards against levels
 * added in the future without a matching case here.
 */
@Override
public EvalNode visitColumnReference(Context ctx, Stack<Expr> stack, ColumnReferenceExpr expr)
    throws TajoException {
  switch (ctx.columnRsvLevel) {
    case LEGACY:
    case RELS_ONLY:
    case RELS_AND_SUBEXPRS:
    case SUBEXPRS_AND_RELS:
      return new FieldEval(
          NameResolver.resolve(ctx.plan, ctx.currentBlock, expr, ctx.columnRsvLevel, ctx.includeSelfDescTable));
    default:
      throw new TajoInternalError("Unsupported column resolving level: " + ctx.columnRsvLevel.name());
  }
}
/**
 * NamedExpr (a target-list entry) is expected to be unwrapped before this
 * visitor runs; reaching here with one indicates a planner bug, so this
 * always throws.
 */
@Override
public EvalNode visitTargetExpr(Context ctx, Stack<Expr> stack, NamedExpr expr) throws TajoException {
  throw new TajoInternalError("ExprAnnotator cannot take NamedExpr");
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Functions and General Set Functions Section
///////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
 * Annotates a generic function call: visits all argument expressions, resolves
 * the function from the catalog by name and argument types, applies implicit
 * type conversion of the arguments toward the definition's parameter types,
 * and wraps the call in the eval node matching the function's type.
 *
 * @throws UndefinedFunctionException if no catalog function matches the name and argument types
 * @throws UnsupportedException for distinct aggregation or an unknown function type
 */
@Override
public EvalNode visitFunction(Context ctx, Stack<Expr> stack, FunctionExpr expr) throws TajoException {
  stack.push(expr); // <--- Push

  // Given parameters; a null parameter list is treated as a zero-arg call.
  Expr[] params = expr.getParams();
  if (params == null) {
    params = new Expr[0];
  }

  EvalNode[] givenArgs = new EvalNode[params.length];
  DataType[] paramTypes = new DataType[params.length];

  for (int i = 0; i < params.length; i++) {
    givenArgs[i] = visit(ctx, stack, params[i]);
    paramTypes[i] = convert(givenArgs[i].getValueType()).getDataType();
  }

  stack.pop(); // <--- Pop

  if (!catalog.containFunction(expr.getSignature(), paramTypes)) {
    throw new UndefinedFunctionException(buildSimpleFunctionSignature(expr.getSignature(), paramTypes));
  }
  FunctionDesc funcDesc = catalog.getFunction(expr.getSignature(), paramTypes);

  // trying the implicit type conversion between actual parameter types and the definition types.
  if (CatalogUtil.checkIfVariableLengthParamDefinition(Arrays.asList(funcDesc.getParamTypes()))) {
    // For a variable-length definition, arguments beyond the declared list are
    // converted to the primitive form of the last seen declared type.
    // NOTE(review): at i == length-1 this uses the primitive of the *previous*
    // declared type rather than the array-typed last parameter — confirm intended.
    DataType lastDataType = funcDesc.getParamTypes()[0];
    for (int i = 0; i < givenArgs.length; i++) {
      if (i < (funcDesc.getParamTypes().length - 1)) { // variable length
        lastDataType = funcDesc.getParamTypes()[i];
      } else {
        lastDataType = CatalogUtil.newSimpleDataType(CatalogUtil.getPrimitiveTypeOf(lastDataType.getType()));
      }
      givenArgs[i] = convertType(ctx, givenArgs[i], TypeConverter.convert(lastDataType));
    }
  } else {
    assertEval(funcDesc.getParamTypes().length == givenArgs.length,
        "The number of parameters is mismatched to the function definition: " + funcDesc.toString());
    // According to our function matching method, each given argument can be casted to the definition parameter.
    for (int i = 0; i < givenArgs.length; i++) {
      givenArgs[i] = convertType(ctx, givenArgs[i], TypeConverter.convert(funcDesc.getParamTypes()[i]));
    }
  }

  FunctionType functionType = funcDesc.getFuncType();
  if (functionType == FunctionType.GENERAL
      || functionType == FunctionType.UDF) {
    return new GeneralFunctionEval(ctx.queryContext, funcDesc, givenArgs);
  } else if (functionType == FunctionType.AGGREGATION
      || functionType == FunctionType.UDA) {
    // An aggregation call outside GROUP BY still forces the block to aggregate.
    if (!ctx.currentBlock.hasNode(NodeType.GROUP_BY)) {
      ctx.currentBlock.setAggregationRequire();
    }
    return new AggregationFunctionCallEval(funcDesc, givenArgs);
  } else if (functionType == FunctionType.DISTINCT_AGGREGATION
      || functionType == FunctionType.DISTINCT_UDA) {
    // Distinct aggregation is not supported through this code path.
    throw new UnsupportedException(funcDesc.toString());
  } else {
    throw new UnsupportedException("function type '" + functionType.name() + "'");
  }
}
/**
 * Annotates count(*): looks up the zero-argument aggregation form of "count"
 * and marks the current block as requiring aggregation.
 */
@Override
public EvalNode visitCountRowsFunction(Context ctx, Stack<Expr> stack, CountRowsFunctionExpr expr)
    throws TajoException {
  final DataType[] noArgs = new DataType[]{};
  final FunctionDesc countRows = catalog.getFunction("count", FunctionType.AGGREGATION, noArgs);
  if (countRows == null) {
    throw new UndefinedFunctionException(buildSimpleFunctionSignature(expr.getSignature(), new DataType[]{}));
  }

  ctx.currentBlock.setAggregationRequire();

  return new AggregationFunctionCallEval(countRows, new EvalNode[]{});
}
/**
 * Annotates a general set (aggregation) function such as sum/avg/min/max/count.
 * The single argument is visited and its type used for catalog lookup, except
 * for count, which accepts any argument type.
 */
@Override
public EvalNode visitGeneralSetFunction(Context ctx, Stack<Expr> stack, GeneralSetFunctionExpr setFunction)
    throws TajoException {
  final Expr[] args = setFunction.getParams();
  final EvalNode[] evaluatedArgs = new EvalNode[args.length];
  final DataType[] argTypes = new DataType[args.length];

  final FunctionType functionType;
  if (setFunction.isDistinct()) {
    functionType = FunctionType.DISTINCT_AGGREGATION;
  } else {
    functionType = FunctionType.AGGREGATION;
  }

  evaluatedArgs[0] = visit(ctx, stack, args[0]);
  if (setFunction.getSignature().equalsIgnoreCase("count")) {
    // count accepts an argument of any type.
    argTypes[0] = CatalogUtil.newSimpleDataType(TajoDataTypes.Type.ANY);
  } else {
    argTypes[0] = convert(evaluatedArgs[0].getValueType()).getDataType();
  }

  if (!catalog.containFunction(setFunction.getSignature(), functionType, argTypes)) {
    throw new UndefinedFunctionException(buildSimpleFunctionSignature(setFunction.getSignature(), argTypes));
  }
  final FunctionDesc funcDesc = catalog.getFunction(setFunction.getSignature(), functionType, argTypes);

  // Aggregation outside an explicit GROUP BY still forces the block to aggregate.
  if (!ctx.currentBlock.hasNode(NodeType.GROUP_BY)) {
    ctx.currentBlock.setAggregationRequire();
  }

  return new AggregationFunctionCallEval(funcDesc, evaluatedArgs);
}
// Function names that must be planned as true window functions (FunctionType.WINDOW)
// rather than as aggregations evaluated over a window. This list exists as a
// workaround: the catalog's containFunction/getFunction cannot match against
// multiple function types in a single lookup (see visitWindowFunction).
public static final Set<String> WINDOW_FUNCTIONS =
    UnmodifiableSet.decorate(
        Sets.newHashSet("row_number", "rank", "dense_rank", "percent_rank", "cume_dist", "first_value", "lag"));
/**
 * Annotates a window function call into a {@link WindowFunctionEval}.
 *
 * <p>Partition and ORDER BY keys are visited so their column references are
 * resolved; the function's arguments determine the catalog lookup types, with
 * special cases for {@code count} (any type) and {@code row_number} (INT8).
 * When no explicit frame is given, a default frame is chosen based on the
 * presence of ORDER BY.
 *
 * @throws UndefinedFunctionException if DISTINCT is used with a pure window
 *         function, or if no catalog function matches the name and types
 */
public EvalNode visitWindowFunction(Context ctx, Stack<Expr> stack, WindowFunctionExpr windowFunc)
    throws TajoException {
  WindowSpec windowSpec = windowFunc.getWindowSpec();

  // Visit partition keys so that referenced columns are resolved.
  Expr key;
  if (windowSpec.hasPartitionBy()) {
    for (int i = 0; i < windowSpec.getPartitionKeys().length; i++) {
      key = windowSpec.getPartitionKeys()[i];
      visit(ctx, stack, key);
    }
  }

  EvalNode[] sortKeys = null;
  if (windowSpec.hasOrderBy()) {
    sortKeys = new EvalNode[windowSpec.getSortSpecs().length];
    for (int i = 0; i < windowSpec.getSortSpecs().length; i++) {
      key = windowSpec.getSortSpecs()[i].getKey();
      sortKeys[i] = visit(ctx, stack, key);
    }
  }

  String funcName = windowFunc.getSignature();
  boolean distinct = windowFunc.isDistinct();
  Expr[] params = windowFunc.getParams();
  EvalNode[] givenArgs = new EvalNode[params.length];
  TajoDataTypes.DataType[] paramTypes = new TajoDataTypes.DataType[params.length];
  FunctionType functionType;

  if (params.length > 0) {
    givenArgs[0] = visit(ctx, stack, params[0]);
    if (windowFunc.getSignature().equalsIgnoreCase("count")) {
      // count accepts an argument of any type.
      paramTypes[0] = CatalogUtil.newSimpleDataType(TajoDataTypes.Type.ANY);
    } else if (windowFunc.getSignature().equalsIgnoreCase("row_number")) {
      paramTypes[0] = CatalogUtil.newSimpleDataType(TajoDataTypes.Type.INT8);
    } else {
      paramTypes[0] = convert(givenArgs[0].getValueType()).getDataType();
    }
    for (int i = 1; i < params.length; i++) {
      givenArgs[i] = visit(ctx, stack, params[i]);
      paramTypes[i] = convert(givenArgs[i].getValueType()).getDataType();
    }
  } else if (windowFunc.getSignature().equalsIgnoreCase("rank")) {
    // rank() without explicit arguments implicitly ranks by the ORDER BY keys.
    givenArgs = sortKeys != null ? sortKeys : new EvalNode[0];
  }

  // No explicit frame was supplied, so choose the standard default:
  // with ORDER BY, UNBOUNDED PRECEDING .. CURRENT ROW; row_number() always
  // spans the whole partition; otherwise an empty (whole-partition) frame.
  WindowFrame frame;
  if (windowSpec.hasOrderBy()) {
    frame = new WindowFrame(new WindowStartBound(WindowFrameStartBoundType.UNBOUNDED_PRECEDING),
        new WindowEndBound(WindowFrameEndBoundType.CURRENT_ROW));
  } else if (windowFunc.getSignature().equalsIgnoreCase("row_number")) {
    frame = new WindowFrame(new WindowStartBound(WindowFrameStartBoundType.UNBOUNDED_PRECEDING),
        new WindowEndBound(WindowFrameEndBoundType.UNBOUNDED_FOLLOWING));
  } else {
    frame = new WindowFrame();
  }

  // TODO - containFunction and getFunction should support the function type mask which provides ORing multiple types.
  // the below checking against WINDOW_FUNCTIONS is a workaround code for the above problem.
  if (WINDOW_FUNCTIONS.contains(funcName.toLowerCase())) {
    if (distinct) {
      // BUG FIX: the message previously hardcoded "row_number()" even when
      // another window function (rank, lag, ...) was the one misused.
      throw new UndefinedFunctionException(funcName + "() does not support distinct keyword.");
    }
    functionType = FunctionType.WINDOW;
  } else {
    functionType = distinct ? FunctionType.DISTINCT_AGGREGATION : FunctionType.AGGREGATION;
  }

  if (!catalog.containFunction(windowFunc.getSignature(), functionType, paramTypes)) {
    throw new UndefinedFunctionException(buildSimpleFunctionSignature(funcName, paramTypes));
  }
  FunctionDesc funcDesc = catalog.getFunction(funcName, functionType, paramTypes);

  return new WindowFunctionEval(funcDesc, givenArgs, frame);
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// Literal Section
///////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
 * A bare DataTypeExpr needs no extra annotation here; delegate to the base visitor.
 */
@Override
public EvalNode visitDataType(Context ctx, Stack<Expr> stack, DataTypeExpr expr) throws TajoException {
  return super.visitDataType(ctx, stack, expr);
}
/**
 * Annotates a CAST expression. If the operand is a constant, the cast is
 * constant-folded immediately (some casts need the session timezone at this
 * point); otherwise a {@link CastEval} is produced for runtime evaluation.
 */
@Override
public EvalNode visitCastExpr(Context ctx, Stack<Expr> stack, CastExpr expr) throws TajoException {
  final EvalNode child = super.visitCastExpr(ctx, stack, expr);

  if (child.getType() != EvalType.CONST) {
    return new CastEval(ctx.queryContext, child, LogicalPlanner.convertDataType(expr.getTarget()));
  }

  // Constant folding: evaluate the cast now, with the session timezone.
  final ConstEval constant = (ConstEval) child;
  return new ConstEval(
      DatumFactory.cast(constant.getValue(), LogicalPlanner.convertDataType(expr.getTarget()), ctx.timeZone));
}
/**
 * Converts a literal into a {@link ConstEval} holding the corresponding datum.
 *
 * @throws TajoInternalError if the literal's value type is not handled here
 */
@Override
public EvalNode visitLiteral(Context ctx, Stack<Expr> stack, LiteralValue expr) throws TajoException {
  switch (expr.getValueType()) {
    case Boolean:
      return new ConstEval(DatumFactory.createBool(Boolean.parseBoolean(expr.getValue())));
    case String:
      return new ConstEval(DatumFactory.createText(expr.getValue()));
    case Unsigned_Integer:
      return new ConstEval(DatumFactory.createInt4(expr.getValue()));
    case Unsigned_Large_Integer:
      return new ConstEval(DatumFactory.createInt8(expr.getValue()));
    case Unsigned_Float:
      return new ConstEval(DatumFactory.createFloat8(expr.getValue()));
    default:
      // Consistency fix: use TajoInternalError (a RuntimeException subtype used
      // throughout this visitor) instead of a bare RuntimeException.
      throw new TajoInternalError("Unsupported type: " + expr.getValueType());
  }
}
/**
 * A NULL literal becomes a constant holding the shared NullDatum instance.
 */
@Override
public EvalNode visitNullLiteral(Context ctx, Stack<Expr> stack, NullLiteral expr) throws TajoException {
  return new ConstEval(NullDatum.get());
}
/**
 * Converts a DATE literal into a constant {@link DateDatum}.
 * The (year, month, day) triple is validated by dateToIntArray() and then
 * normalized through a julian-day round trip.
 */
@Override
public EvalNode visitDateLiteral(Context context, Stack<Expr> stack, DateLiteral expr) throws TajoException {
  DateValue dateValue = expr.getDate();
  int[] dates = dateToIntArray(dateValue.getYears(), dateValue.getMonths(), dateValue.getDays());

  // j2date() fills tm.years/monthOfYear/dayOfMonth from the julian day number,
  // so the manual pre-assignments the original code made were dead stores.
  TimeMeta tm = new TimeMeta();
  DateTimeUtil.j2date(DateTimeUtil.date2j(dates[0], dates[1], dates[2]), tm);

  return new ConstEval(new DateDatum(tm));
}
/**
 * Converts a TIMESTAMP literal into a constant {@link TimestampDatum}.
 * The literal is interpreted in the session timezone and normalized to UTC.
 */
@Override
public EvalNode visitTimestampLiteral(Context ctx, Stack<Expr> stack, TimestampLiteral expr)
    throws TajoException {
  final DateValue dateValue = expr.getDate();
  final TimeValue timeValue = expr.getTime();

  final int[] dates = dateToIntArray(dateValue.getYears(),
      dateValue.getMonths(),
      dateValue.getDays());
  final int[] times = timeToIntArray(timeValue.getHours(),
      timeValue.getMinutes(),
      timeValue.getSeconds(),
      timeValue.getSecondsFraction());

  // The parsed fraction is in milliseconds; toJulianTimestamp takes microseconds.
  final int micros = timeValue.hasSecondsFraction() ? times[3] * 1000 : 0;
  final long timestamp = DateTimeUtil.toJulianTimestamp(
      dates[0], dates[1], dates[2], times[0], times[1], times[2], micros);

  final TimeMeta tm = new TimeMeta();
  DateTimeUtil.toJulianTimeMeta(timestamp, tm);
  DateTimeUtil.toUTCTimezone(tm, ctx.timeZone);

  return new ConstEval(new TimestampDatum(DateTimeUtil.toJulianTimestamp(tm)));
}
/**
 * An INTERVAL literal is parsed directly from its source text by IntervalDatum.
 */
@Override
public EvalNode visitIntervalLiteral(Context ctx, Stack<Expr> stack, IntervalLiteral expr) throws TajoException {
  return new ConstEval(new IntervalDatum(expr.getExprStr()));
}
/**
 * Converts a TIME literal into a constant {@link TimeDatum}.
 */
@Override
public EvalNode visitTimeLiteral(Context ctx, Stack<Expr> stack, TimeLiteral expr) throws TajoException {
  final TimeValue timeValue = expr.getTime();
  final int[] times = timeToIntArray(timeValue.getHours(),
      timeValue.getMinutes(),
      timeValue.getSeconds(),
      timeValue.getSecondsFraction());

  // The parsed fraction is in milliseconds; toTime() takes microseconds.
  final long time = DateTimeUtil.toTime(times[0], times[1], times[2],
      timeValue.hasSecondsFraction() ? times[3] * 1000 : 0);

  // Round-trip through TimeMeta, as the original code did.
  final TimeDatum timeDatum = new TimeDatum(time);
  final TimeMeta tm = timeDatum.asTimeMeta();
  return new ConstEval(new TimeDatum(DateTimeUtil.toTime(tm)));
}
/**
 * Parses year/month/day strings into {@code {year, month, day}}, validating
 * each component's range.
 *
 * @throws TajoException (syntax error) if any component is out of range
 */
public static int[] dateToIntArray(String years, String months, String days)
    throws TajoException {
  final int year = Integer.parseInt(years);
  final int month = Integer.parseInt(months);
  final int day = Integer.parseInt(days);

  if (year < 1 || year > 9999) {
    throw makeSyntaxError(String.format("Years (%d) must be between 1 and 9999 integer value", year));
  }
  if (month < 1 || month > 12) {
    throw makeSyntaxError(String.format("Months (%d) must be between 1 and 12 integer value", month));
  }
  if (day < 1 || day > 31) {
    throw makeSyntaxError(String.format("Days (%d) must be between 1 and 31 integer value", day));
  }

  return new int[]{year, month, day};
}
/**
 * Parses hour/minute/second/fraction strings into
 * {@code {hour, minute, second, fraction}} where the fraction is in
 * milliseconds (0 when {@code fractionOfSecond} is null).
 *
 * @throws TajoException (syntax error) if any component is out of range
 */
public static int[] timeToIntArray(String hours, String minutes, String seconds, String fractionOfSecond)
    throws TajoException {
  int hour = Integer.parseInt(hours);
  int minute = Integer.parseInt(minutes);
  int second = Integer.parseInt(seconds);
  int fraction = 0;
  if (fractionOfSecond != null) {
    fraction = Integer.parseInt(fractionOfSecond);
  }

  if (!(0 <= hour && hour <= 23)) {
    // BUG FIX: the message previously said "between 0 and 24" although the check is 0..23.
    throw makeSyntaxError(String.format("Hours (%d) must be between 0 and 23 integer value", hour));
  }
  if (!(0 <= minute && minute <= 59)) {
    throw makeSyntaxError(String.format("Minutes (%d) must be between 0 and 59 integer value", minute));
  }
  if (!(0 <= second && second <= 59)) {
    throw makeSyntaxError(String.format("Seconds (%d) must be between 0 and 59 integer value", second));
  }
  // A zero fraction always passes, so the old `if (fraction != 0)` guard was redundant.
  if (!(0 <= fraction && fraction <= 999)) {
    // BUG FIX: the message previously labeled the fractional part "Seconds".
    throw makeSyntaxError(
        String.format("Fraction of second (%d) must be between 0 and 999 integer value", fraction));
  }

  int[] results = new int[4];
  results[0] = hour;
  results[1] = minute;
  results[2] = second;
  results[3] = fraction;
  return results;
}
}
|
googleapis/google-cloud-java | 36,900 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/TrajectoryExactMatchInstance.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Spec for TrajectoryExactMatch instance.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance}
*/
public final class TrajectoryExactMatchInstance extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance)
TrajectoryExactMatchInstanceOrBuilder {
private static final long serialVersionUID = 0L;
// Use TrajectoryExactMatchInstance.newBuilder() to construct.
private TrajectoryExactMatchInstance(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TrajectoryExactMatchInstance() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TrajectoryExactMatchInstance();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_TrajectoryExactMatchInstance_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_TrajectoryExactMatchInstance_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance.class,
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance.Builder.class);
}
private int bitField0_;
public static final int PREDICTED_TRAJECTORY_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_;
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the predictedTrajectory field is set.
*/
@java.lang.Override
public boolean hasPredictedTrajectory() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The predictedTrajectory.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() {
return predictedTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: predictedTrajectory_;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getPredictedTrajectoryOrBuilder() {
return predictedTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: predictedTrajectory_;
}
public static final int REFERENCE_TRAJECTORY_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_;
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the referenceTrajectory field is set.
*/
@java.lang.Override
public boolean hasReferenceTrajectory() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The referenceTrajectory.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() {
return referenceTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: referenceTrajectory_;
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder getReferenceTrajectoryOrBuilder() {
return referenceTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: referenceTrajectory_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getPredictedTrajectory());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getReferenceTrajectory());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getPredictedTrajectory());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReferenceTrajectory());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance other =
(com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance) obj;
if (hasPredictedTrajectory() != other.hasPredictedTrajectory()) return false;
if (hasPredictedTrajectory()) {
if (!getPredictedTrajectory().equals(other.getPredictedTrajectory())) return false;
}
if (hasReferenceTrajectory() != other.hasReferenceTrajectory()) return false;
if (hasReferenceTrajectory()) {
if (!getReferenceTrajectory().equals(other.getReferenceTrajectory())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasPredictedTrajectory()) {
hash = (37 * hash) + PREDICTED_TRAJECTORY_FIELD_NUMBER;
hash = (53 * hash) + getPredictedTrajectory().hashCode();
}
if (hasReferenceTrajectory()) {
hash = (37 * hash) + REFERENCE_TRAJECTORY_FIELD_NUMBER;
hash = (53 * hash) + getReferenceTrajectory().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Spec for TrajectoryExactMatch instance.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance)
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstanceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_TrajectoryExactMatchInstance_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_TrajectoryExactMatchInstance_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance.class,
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getPredictedTrajectoryFieldBuilder();
getReferenceTrajectoryFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
predictedTrajectory_ = null;
if (predictedTrajectoryBuilder_ != null) {
predictedTrajectoryBuilder_.dispose();
predictedTrajectoryBuilder_ = null;
}
referenceTrajectory_ = null;
if (referenceTrajectoryBuilder_ != null) {
referenceTrajectoryBuilder_.dispose();
referenceTrajectoryBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_TrajectoryExactMatchInstance_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance build() {
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance buildPartial() {
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance result =
new com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.predictedTrajectory_ =
predictedTrajectoryBuilder_ == null
? predictedTrajectory_
: predictedTrajectoryBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.referenceTrajectory_ =
referenceTrajectoryBuilder_ == null
? referenceTrajectory_
: referenceTrajectoryBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance other) {
if (other
== com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance.getDefaultInstance())
return this;
if (other.hasPredictedTrajectory()) {
mergePredictedTrajectory(other.getPredictedTrajectory());
}
if (other.hasReferenceTrajectory()) {
mergeReferenceTrajectory(other.getReferenceTrajectory());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format parse loop: reads tag/value pairs until tag 0 (end of stream).
// Tags 10 and 18 are the length-delimited encodings of fields 1 and 2;
// anything else is preserved via the unknown-field mechanism.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getPredictedTrajectoryFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(
getReferenceTrajectoryFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Partial progress is kept even on failure; mark the builder dirty.
onChanged();
} // finally
return this;
}
// ---------------------------------------------------------------------------
// protoc-generated accessor suite for field 1 (predicted_trajectory).
// Do not hand-edit; regenerate from the .proto definition instead.
// Invariant: the value lives either in predictedTrajectory_ (plain message)
// or in predictedTrajectoryBuilder_ (lazily created sub-builder), never both;
// bit 0x00000001 of bitField0_ records explicit presence.
// ---------------------------------------------------------------------------
private int bitField0_;
private com.google.cloud.aiplatform.v1beta1.Trajectory predictedTrajectory_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
predictedTrajectoryBuilder_;
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the predictedTrajectory field is set.
*/
public boolean hasPredictedTrajectory() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The predictedTrajectory.
*/
public com.google.cloud.aiplatform.v1beta1.Trajectory getPredictedTrajectory() {
if (predictedTrajectoryBuilder_ == null) {
return predictedTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: predictedTrajectory_;
} else {
return predictedTrajectoryBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
if (predictedTrajectoryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
predictedTrajectory_ = value;
} else {
predictedTrajectoryBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPredictedTrajectory(
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) {
if (predictedTrajectoryBuilder_ == null) {
predictedTrajectory_ = builderForValue.build();
} else {
predictedTrajectoryBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergePredictedTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
if (predictedTrajectoryBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& predictedTrajectory_ != null
&& predictedTrajectory_
!= com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) {
getPredictedTrajectoryBuilder().mergeFrom(value);
} else {
predictedTrajectory_ = value;
}
} else {
predictedTrajectoryBuilder_.mergeFrom(value);
}
if (predictedTrajectory_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearPredictedTrajectory() {
bitField0_ = (bitField0_ & ~0x00000001);
predictedTrajectory_ = null;
if (predictedTrajectoryBuilder_ != null) {
predictedTrajectoryBuilder_.dispose();
predictedTrajectoryBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getPredictedTrajectoryBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getPredictedTrajectoryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder
getPredictedTrajectoryOrBuilder() {
if (predictedTrajectoryBuilder_ != null) {
return predictedTrajectoryBuilder_.getMessageOrBuilder();
} else {
return predictedTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: predictedTrajectory_;
}
}
/**
*
*
* <pre>
* Required. Spec for predicted tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory predicted_trajectory = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily switches the field into builder mode; from then on the sub-builder
// owns the value and predictedTrajectory_ is cleared.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
getPredictedTrajectoryFieldBuilder() {
if (predictedTrajectoryBuilder_ == null) {
predictedTrajectoryBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>(
getPredictedTrajectory(), getParentForChildren(), isClean());
predictedTrajectory_ = null;
}
return predictedTrajectoryBuilder_;
}
// ---------------------------------------------------------------------------
// protoc-generated accessor suite for field 2 (reference_trajectory).
// Do not hand-edit; regenerate from the .proto definition instead.
// Mirrors the predicted_trajectory suite above, with presence tracked by
// bit 0x00000002 of bitField0_.
// ---------------------------------------------------------------------------
private com.google.cloud.aiplatform.v1beta1.Trajectory referenceTrajectory_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
referenceTrajectoryBuilder_;
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the referenceTrajectory field is set.
*/
public boolean hasReferenceTrajectory() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The referenceTrajectory.
*/
public com.google.cloud.aiplatform.v1beta1.Trajectory getReferenceTrajectory() {
if (referenceTrajectoryBuilder_ == null) {
return referenceTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: referenceTrajectory_;
} else {
return referenceTrajectoryBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
if (referenceTrajectoryBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
referenceTrajectory_ = value;
} else {
referenceTrajectoryBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setReferenceTrajectory(
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder builderForValue) {
if (referenceTrajectoryBuilder_ == null) {
referenceTrajectory_ = builderForValue.build();
} else {
referenceTrajectoryBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeReferenceTrajectory(com.google.cloud.aiplatform.v1beta1.Trajectory value) {
if (referenceTrajectoryBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& referenceTrajectory_ != null
&& referenceTrajectory_
!= com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()) {
getReferenceTrajectoryBuilder().mergeFrom(value);
} else {
referenceTrajectory_ = value;
}
} else {
referenceTrajectoryBuilder_.mergeFrom(value);
}
if (referenceTrajectory_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearReferenceTrajectory() {
bitField0_ = (bitField0_ & ~0x00000002);
referenceTrajectory_ = null;
if (referenceTrajectoryBuilder_ != null) {
referenceTrajectoryBuilder_.dispose();
referenceTrajectoryBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.Trajectory.Builder getReferenceTrajectoryBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getReferenceTrajectoryFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder
getReferenceTrajectoryOrBuilder() {
if (referenceTrajectoryBuilder_ != null) {
return referenceTrajectoryBuilder_.getMessageOrBuilder();
} else {
return referenceTrajectory_ == null
? com.google.cloud.aiplatform.v1beta1.Trajectory.getDefaultInstance()
: referenceTrajectory_;
}
}
/**
*
*
* <pre>
* Required. Spec for reference tool call trajectory.
* </pre>
*
* <code>
* optional .google.cloud.aiplatform.v1beta1.Trajectory reference_trajectory = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
// Lazily switches the field into builder mode; from then on the sub-builder
// owns the value and referenceTrajectory_ is cleared.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>
getReferenceTrajectoryFieldBuilder() {
if (referenceTrajectoryBuilder_ == null) {
referenceTrajectoryBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.Trajectory,
com.google.cloud.aiplatform.v1beta1.Trajectory.Builder,
com.google.cloud.aiplatform.v1beta1.TrajectoryOrBuilder>(
getReferenceTrajectory(), getParentForChildren(), isClean());
referenceTrajectory_ = null;
}
return referenceTrajectoryBuilder_;
}
// Covariant-return delegations for unknown-field handling (protoc-generated).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance)
// Singleton default instance (all fields unset); created eagerly at class load.
private static final com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance();
}
public static com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser: builds via the Builder and, on any failure, attaches the
// partially-built message to the thrown InvalidProtocolBufferException so
// callers can inspect what was parsed before the error.
private static final com.google.protobuf.Parser<TrajectoryExactMatchInstance> PARSER =
new com.google.protobuf.AbstractParser<TrajectoryExactMatchInstance>() {
@java.lang.Override
public TrajectoryExactMatchInstance parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TrajectoryExactMatchInstance> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TrajectoryExactMatchInstance> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance (Message contract).
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.TrajectoryExactMatchInstance
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
openjdk/jdk8 | 37,006 | jdk/src/share/classes/javax/swing/text/InternationalFormatter.java | /*
* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.text;
import java.awt.event.ActionEvent;
import java.io.*;
import java.text.*;
import java.text.AttributedCharacterIterator.Attribute;
import java.util.*;
import javax.swing.*;
/**
* <code>InternationalFormatter</code> extends <code>DefaultFormatter</code>,
* using an instance of <code>java.text.Format</code> to handle the
* conversion to a String, and the conversion from a String.
* <p>
* If <code>getAllowsInvalid()</code> is false, this will ask the
* <code>Format</code> to format the current text on every edit.
* <p>
* You can specify a minimum and maximum value by way of the
* <code>setMinimum</code> and <code>setMaximum</code> methods. In order
* for this to work the values returned from <code>stringToValue</code> must be
* comparable to the min/max values by way of the <code>Comparable</code>
* interface.
* <p>
* Be careful how you configure the <code>Format</code> and the
* <code>InternationalFormatter</code>, as it is possible to create a
* situation where certain values can not be input. Consider the date
* format 'M/d/yy', an <code>InternationalFormatter</code> that is always
* valid (<code>setAllowsInvalid(false)</code>), is in overwrite mode
* (<code>setOverwriteMode(true)</code>) and the date 7/1/99. In this
* case the user will not be able to enter a two digit month or day of
* month. To avoid this, the format should be 'MM/dd/yy'.
* <p>
* If <code>InternationalFormatter</code> is configured to only allow valid
* values (<code>setAllowsInvalid(false)</code>), every valid edit will result
* in the text of the <code>JFormattedTextField</code> being completely reset
* from the <code>Format</code>.
* The cursor position will also be adjusted as literal characters are
* added/removed from the resulting String.
* <p>
* <code>InternationalFormatter</code>'s behavior of
* <code>stringToValue</code> is slightly different than that of
 * <code>DefaultFormatter</code>, it does the following:
* <ol>
* <li><code>parseObject</code> is invoked on the <code>Format</code>
* specified by <code>setFormat</code>
* <li>If a Class has been set for the values (<code>setValueClass</code>),
* supers implementation is invoked to convert the value returned
* from <code>parseObject</code> to the appropriate class.
* <li>If a <code>ParseException</code> has not been thrown, and the value
* is outside the min/max a <code>ParseException</code> is thrown.
* <li>The value is returned.
* </ol>
* <code>InternationalFormatter</code> implements <code>stringToValue</code>
* in this manner so that you can specify an alternate Class than
* <code>Format</code> may return.
* <p>
* <strong>Warning:</strong>
* Serialized objects of this class will not be compatible with
* future Swing releases. The current serialization support is
* appropriate for short term storage or RMI between applications running
* the same version of Swing. As of 1.4, support for long term storage
* of all JavaBeans™
* has been added to the <code>java.beans</code> package.
* Please see {@link java.beans.XMLEncoder}.
*
* @see java.text.Format
* @see java.lang.Comparable
*
* @since 1.4
*/
public class InternationalFormatter extends DefaultFormatter {
/**
* Used by <code>getFields</code>.
*/
private static final Format.Field[] EMPTY_FIELD_ARRAY =new Format.Field[0];
/**
* Object used to handle the conversion.
*/
private Format format;
/**
* Can be used to impose a maximum value.
*/
private Comparable max;
/**
* Can be used to impose a minimum value.
*/
private Comparable min;
/**
* <code>InternationalFormatter</code>'s behavior is dictated by a
* <code>AttributedCharacterIterator</code> that is obtained from
* the <code>Format</code>. On every edit, assuming
* allows invalid is false, the <code>Format</code> instance is invoked
* with <code>formatToCharacterIterator</code>. A <code>BitSet</code> is
* also kept up to date with the non-literal characters, that is
* for every index in the <code>AttributedCharacterIterator</code> an
* entry in the bit set is updated based on the return value from
* <code>isLiteral(Map)</code>. <code>isLiteral(int)</code> then uses
* this cached information.
* <p>
* If allowsInvalid is false, every edit results in resetting the complete
* text of the JTextComponent.
* <p>
* InternationalFormatterFilter can also provide two actions suitable for
* incrementing and decrementing. To enable this a subclass must
* override <code>getSupportsIncrement</code> to return true, and
* override <code>adjustValue</code> to handle the changing of the
* value. If you want to support changing the value outside of
* the valid FieldPositions, you will need to override
* <code>canIncrement</code>.
*/
/**
* A bit is set for every index identified in the
* AttributedCharacterIterator that is not considered decoration.
* This should only be used if validMask is true.
*/
private transient BitSet literalMask;
/**
* Used to iterate over characters.
*/
private transient AttributedCharacterIterator iterator;
/**
* True if the Format was able to convert the value to a String and
* back.
*/
private transient boolean validMask;
/**
* Current value being displayed.
*/
private transient String string;
/**
* If true, DocumentFilter methods are unconditionally allowed,
* and no checking is done on their values. This is used when
* incrementing/decrementing via the actions.
*/
private transient boolean ignoreDocumentMutate;
/**
* Creates an <code>InternationalFormatter</code> with no
* <code>Format</code> specified.
*/
public InternationalFormatter() {
// Default to insert mode: typed characters are inserted, not overwritten.
setOverwriteMode(false);
}
/**
* Creates an <code>InternationalFormatter</code> with the specified
* <code>Format</code> instance.
*
* @param format Format instance used for converting from/to Strings
*/
public InternationalFormatter(Format format) {
this(); // establishes insert mode before the format is attached
setFormat(format);
}
/**
* Sets the format that dictates the legal values that can be edited
* and displayed.
*
* @param format <code>Format</code> instance used for converting
* from/to Strings
*/
public void setFormat(Format format) {
// Plain assignment; no mask rebuild here — the mask is refreshed later via
// updateMaskIfNecessary (e.g. on install or value updates).
this.format = format;
}
/**
* Returns the format that dictates the legal values that can be edited
* and displayed.
*
* @return Format instance used for converting from/to Strings
*/
public Format getFormat() {
return format; // may be null if no Format was ever set
}
/**
* Sets the minimum permissible value. If the <code>valueClass</code> has
* not been specified, and <code>minimum</code> is non null, the
* <code>valueClass</code> will be set to that of the class of
* <code>minimum</code>.
*
* @param minimum Minimum legal value that can be input
* @see #setValueClass
*/
public void setMinimum(Comparable minimum) {
// If the caller has not fixed a value class yet, infer one from the bound.
if (minimum != null && getValueClass() == null) {
setValueClass(minimum.getClass());
}
min = minimum;
}
/**
* Returns the minimum permissible value.
*
* @return Minimum legal value that can be input
*/
public Comparable getMinimum() {
return min; // null means no lower bound is enforced
}
/**
* Sets the maximum permissible value. If the <code>valueClass</code> has
* not been specified, and <code>max</code> is non null, the
* <code>valueClass</code> will be set to that of the class of
* <code>max</code>.
*
* @param max Maximum legal value that can be input
* @see #setValueClass
*/
public void setMaximum(Comparable max) {
// If the caller has not fixed a value class yet, infer one from the bound.
if (max != null && getValueClass() == null) {
setValueClass(max.getClass());
}
this.max = max;
}
/**
* Returns the maximum permissible value.
*
* @return Maximum legal value that can be input
*/
public Comparable getMaximum() {
return max; // null means no upper bound is enforced
}
/**
* Installs the <code>DefaultFormatter</code> onto a particular
* <code>JFormattedTextField</code>.
* This will invoke <code>valueToString</code> to convert the
* current value from the <code>JFormattedTextField</code> to
* a String. This will then install the <code>Action</code>s from
* <code>getActions</code>, the <code>DocumentFilter</code>
* returned from <code>getDocumentFilter</code> and the
* <code>NavigationFilter</code> returned from
* <code>getNavigationFilter</code> onto the
* <code>JFormattedTextField</code>.
* <p>
* Subclasses will typically only need to override this if they
* wish to install additional listeners on the
* <code>JFormattedTextField</code>.
* <p>
* If there is a <code>ParseException</code> in converting the
* current value to a String, this will set the text to an empty
* String, and mark the <code>JFormattedTextField</code> as being
* in an invalid state.
* <p>
* While this is a public method, this is typically only useful
* for subclassers of <code>JFormattedTextField</code>.
* <code>JFormattedTextField</code> will invoke this method at
* the appropriate times when the value changes, or its internal
* state changes.
*
* @param ftf JFormattedTextField to format for, may be null indicating
* uninstall from current JFormattedTextField.
*/
public void install(JFormattedTextField ftf) {
// Super installs actions/filters and renders the current value as text.
super.install(ftf);
// Rebuild the literal mask against the freshly rendered text.
updateMaskIfNecessary();
// invoked again as the mask should now be valid.
positionCursorAtInitialLocation();
}
/**
* Returns a String representation of the Object <code>value</code>.
* This invokes <code>format</code> on the current <code>Format</code>.
*
* @throws ParseException if there is an error in the conversion
* @param value Value to convert
* @return String representation of value
*/
public String valueToString(Object value) throws ParseException {
// A null value maps to the empty string; without a Format, fall back to
// the value's own toString().
if (value == null) {
return "";
}
Format fmt = getFormat();
return (fmt == null) ? value.toString() : fmt.format(value);
}
/**
* Returns the <code>Object</code> representation of the
* <code>String</code> <code>text</code>.
*
* @param text <code>String</code> to convert
* @return <code>Object</code> representation of text
* @throws ParseException if there is an error in the conversion
*/
public Object stringToValue(String text) throws ParseException {
// Step 1: let the Format parse the raw text.
Object value = stringToValue(text, getFormat());
// Convert to the value class if the Value returned from the
// Format does not match.
if (value != null && getValueClass() != null &&
!getValueClass().isInstance(value)) {
value = super.stringToValue(value.toString());
}
// Step 3: range-check against min/max; a ClassCastException raised while
// comparing is reported to the caller as a ParseException.
try {
if (!isValidValue(value, true)) {
throw new ParseException("Value not within min/max range", 0);
}
} catch (ClassCastException cce) {
throw new ParseException("Class cast exception comparing values: "
+ cce, 0);
}
return value;
}
/**
* Returns the <code>Format.Field</code> constants associated with
* the text at <code>offset</code>. If <code>offset</code> is not
* a valid location into the current text, this will return an
* empty array.
*
* @param offset offset into text to be examined
* @return Format.Field constants associated with the text at the
* given position.
*/
public Format.Field[] getFields(int offset) {
if (getAllowsInvalid()) {
// This will work if the currently edited value is valid.
updateMask();
}
Map<Attribute, Object> attrs = getAttributes(offset);
if (attrs == null || attrs.isEmpty()) {
return EMPTY_FIELD_ARRAY;
}
// The attribute keys at this offset are the Format.Field constants.
return attrs.keySet().toArray(EMPTY_FIELD_ARRAY);
}
/**
* Creates a copy of the DefaultFormatter.
*
* @return copy of the DefaultFormatter
*/
public Object clone() throws CloneNotSupportedException {
InternationalFormatter formatter = (InternationalFormatter)super.
clone();
// The cached parse state is per-instance; drop it so the copy rebuilds
// its own mask instead of sharing this formatter's.
formatter.literalMask = null;
formatter.iterator = null;
formatter.validMask = false;
formatter.string = null;
return formatter;
}
/**
* If <code>getSupportsIncrement</code> returns true, this returns
* two Actions suitable for incrementing/decrementing the value.
*/
protected Action[] getActions() {
// Increment/decrement actions are only exposed when a subclass opts in
// via getSupportsIncrement(); otherwise no extra key bindings are added.
if (getSupportsIncrement()) {
return new Action[] { new IncrementAction("increment", 1),
new IncrementAction("decrement", -1) };
}
return null;
}
/**
* Invokes <code>parseObject</code> on <code>f</code>, returning
* its value.
*/
Object stringToValue(String text, Format f) throws ParseException {
// With no Format installed, the text itself is the value.
return (f == null) ? text : f.parseObject(text);
}
/**
* Returns true if <code>value</code> is between the min/max.
*
* @param wantsCCE If false, and a ClassCastException is thrown in
* comparing the values, the exception is consumed and
* false is returned.
*/
boolean isValidValue(Object value, boolean wantsCCE) {
// Check both bounds; a ClassCastException from either compareTo is
// either propagated (wantsCCE) or treated as "invalid".
Comparable minimum = getMinimum();
Comparable maximum = getMaximum();
try {
if (minimum != null && minimum.compareTo(value) > 0) {
return false;
}
if (maximum != null && maximum.compareTo(value) < 0) {
return false;
}
} catch (ClassCastException cce) {
if (wantsCCE) {
throw cce;
}
return false;
}
return true;
}
/**
* Returns a Set of the attribute identifiers at <code>index</code>.
*/
Map<Attribute, Object> getAttributes(int index) {
// Only meaningful while the cached mask/iterator reflect the current text.
if (isValidMask()) {
AttributedCharacterIterator iterator = getIterator();
if (index >= 0 && index <= iterator.getEndIndex()) {
iterator.setIndex(index);
return iterator.getAttributes();
}
}
return null; // no valid mask, or index out of range
}
/**
* Returns the start of the first run that contains the attribute
* <code>id</code>. This will return <code>-1</code> if the attribute
* can not be found.
*/
int getAttributeStart(AttributedCharacterIterator.Attribute id) {
// Scan forward from the start; the first index carrying the attribute
// is the start of its first run.
if (isValidMask()) {
AttributedCharacterIterator iter = getIterator();
for (char ch = iter.first(); ch != CharacterIterator.DONE; ch = iter.next()) {
if (iter.getAttribute(id) != null) {
return iter.getIndex();
}
}
}
return -1;
}
/**
* Returns the <code>AttributedCharacterIterator</code> used to
* format the last value.
*/
AttributedCharacterIterator getIterator() {
return iterator; // iterator from the last successful formatToCharacterIterator
}
/**
* Updates the AttributedCharacterIterator and bitset, if necessary.
*/
void updateMaskIfNecessary() {
// The mask is only maintained when invalid edits are rejected and a
// Format is installed.
if (getAllowsInvalid() || getFormat() == null) {
return;
}
// Rebuild when no valid mask exists yet, or when the displayed text has
// drifted from the text the mask was computed for.
if (!isValidMask() || !getFormattedTextField().getText().equals(string)) {
updateMask();
}
}
/**
* Updates the AttributedCharacterIterator by invoking
* <code>formatToCharacterIterator</code> on the <code>Format</code>.
* If this is successful,
* <code>updateMask(AttributedCharacterIterator)</code>
* is then invoked to update the internal bitmask.
*/
void updateMask() {
if (getFormat() != null) {
Document doc = getFormattedTextField().getDocument();
// Pessimistically invalidate; updateMask(iterator) (not shown here) is
// expected to re-validate on success.
validMask = false;
if (doc != null) {
try {
string = doc.getText(0, doc.getLength());
} catch (BadLocationException ble) {
string = null;
}
if (string != null) {
try {
Object value = stringToValue(string);
AttributedCharacterIterator iterator = getFormat().
formatToCharacterIterator(value);
updateMask(iterator);
}
// Best-effort by design: if the current text cannot be parsed or
// re-formatted, the mask simply stays invalid.
catch (ParseException pe) {}
catch (IllegalArgumentException iae) {}
catch (NullPointerException npe) {}
}
}
}
}
/**
* Returns the number of literal characters before <code>index</code>.
*/
int getLiteralCountTo(int index) {
// Count literal (non-editable) positions strictly before index.
int count = 0;
for (int i = 0; i < index; i++) {
count += isLiteral(i) ? 1 : 0;
}
return count;
}
/**
* Returns true if the character at index is a literal, that is
* not editable.
*/
boolean isLiteral(int index) {
// Only consult the cached bit set while the mask is valid and the index
// falls inside the cached text; everything else is "not a literal".
return isValidMask() && index < string.length() && literalMask.get(index);
}
/**
* Returns the literal character at index.
*/
char getLiteral(int index) {
// Guard clause: anything outside the valid cached text yields NUL.
if (!isValidMask() || string == null || index >= string.length()) {
return (char) 0;
}
return string.charAt(index);
}
/**
* Returns true if the character at offset is navigable too. This
* is implemented in terms of <code>isLiteral</code>, subclasses
* may wish to provide different behavior.
*/
boolean isNavigatable(int offset) {
// The caret may rest anywhere that is not a literal (decoration) character.
return !isLiteral(offset);
}
/**
 * Overridden to update the mask after invoking supers implementation.
*/
void updateValue(Object value) {
super.updateValue(value);
// New value means new text — keep the literal mask in sync with it.
updateMaskIfNecessary();
}
/**
 * Overridden to unconditionally allow the replace if
* ignoreDocumentMutate is true.
*/
void replace(DocumentFilter.FilterBypass fb, int offset,
int length, String text,
AttributeSet attrs) throws BadLocationException {
// While the increment/decrement actions rewrite the document, bypass all
// filtering so the programmatic mutation cannot be vetoed.
if (ignoreDocumentMutate) {
fb.replace(offset, length, text, attrs);
return;
}
super.replace(fb, offset, length, text, attrs);
}
/**
* Returns the index of the next non-literal character starting at
* index. If index is not a literal, it will be returned.
*
* @param direction Amount to increment looking for non-literal
*/
private int getNextNonliteralIndex(int index, int direction) {
int max = getFormattedTextField().getDocument().getLength();
// Walk in the given direction until an editable position is found.
while (index >= 0 && index < max) {
if (!isLiteral(index)) {
return index;
}
index += direction;
}
// Ran off the document: clamp to the nearest end.
return (direction == -1) ? 0 : max;
}
/**
 * Overridden in an attempt to honor the literals.
* <p>If we do not allow invalid values and are in overwrite mode, this
* {@code rh.length} is corrected as to preserve trailing literals.
* If not in overwrite mode, and there is text to insert it is
* inserted at the next non literal index going forward. If there
* is only text to remove, it is removed from the next non literal
* index going backward.
*/
// Adjusts the ReplaceHolder's offset/length so the edit skips literal
// characters before delegating the actual permission check to super.
boolean canReplace(ReplaceHolder rh) {
if (!getAllowsInvalid()) {
String text = rh.text;
int tl = (text != null) ? text.length() : 0;
JTextComponent c = getFormattedTextField();
if (tl == 0 && rh.length == 1 && c.getSelectionStart() != rh.offset) {
// Backspace, adjust to actually delete next non-literal.
rh.offset = getNextNonliteralIndex(rh.offset, -1);
} else if (getOverwriteMode()) {
// Overwrite: widen rh.length so each typed character consumes one
// non-literal position (plus any literals embedded between them).
int pos = rh.offset;
int textPos = pos;
boolean overflown = false;
for (int i = 0; i < rh.length; i++) {
while (isLiteral(pos)) pos++;
if (pos >= string.length()) {
// Ran past the end of the text; fall back to the last good
// position and keep trailing literals intact.
pos = textPos;
overflown = true;
break;
}
textPos = ++pos;
}
if (overflown || c.getSelectedText() == null) {
rh.length = pos - rh.offset;
}
}
else if (tl > 0) {
// insert (or insert and remove)
rh.offset = getNextNonliteralIndex(rh.offset, 1);
}
else {
// remove only
rh.offset = getNextNonliteralIndex(rh.offset, -1);
}
((ExtendedReplaceHolder)rh).endOffset = rh.offset;
((ExtendedReplaceHolder)rh).endTextLength = (rh.text != null) ?
rh.text.length() : 0;
}
else {
((ExtendedReplaceHolder)rh).endOffset = rh.offset;
((ExtendedReplaceHolder)rh).endTextLength = (rh.text != null) ?
rh.text.length() : 0;
}
boolean can = super.canReplace(rh);
if (can && !getAllowsInvalid()) {
// Edit accepted: snapshot the resulting formatted value into the holder.
((ExtendedReplaceHolder)rh).resetFromValue(this);
}
return can;
}
    /**
     * When in !allowsInvalid mode the text is reset on every edit, thus
     * supers implementation will position the cursor at the wrong position.
     * As such, this invokes supers implementation and then invokes
     * <code>repositionCursor</code> to correctly reset the cursor.
     */
    boolean replace(ReplaceHolder rh) throws BadLocationException {
        int start = -1;
        int direction = 1;
        int literalCount = -1;

        // A pure removal that is not a simple forward selection delete is
        // treated as a backward edit (e.g. backspace).
        if (rh.length > 0 && (rh.text == null || rh.text.length() == 0) &&
               (getFormattedTextField().getSelectionStart() != rh.offset ||
                   rh.length > 1)) {
            direction = -1;
        }
        if (!getAllowsInvalid()) {
            if ((rh.text == null || rh.text.length() == 0) && rh.length > 0) {
                // remove
                start = getFormattedTextField().getSelectionStart();
            }
            else {
                start = rh.offset;
            }
            // Remember how many literals precede the edit point so the
            // caret can be re-derived after the text is rewritten.
            literalCount = getLiteralCountTo(start);
        }
        if (super.replace(rh)) {
            if (start != -1) {
                int end = ((ExtendedReplaceHolder)rh).endOffset;

                end += ((ExtendedReplaceHolder)rh).endTextLength;
                repositionCursor(literalCount, end, direction);
            }
            else {
                start = ((ExtendedReplaceHolder)rh).endOffset;
                if (direction == 1) {
                    start += ((ExtendedReplaceHolder)rh).endTextLength;
                }
                repositionCursor(start, direction);
            }
            return true;
        }
        return false;
    }
    /**
     * Repositions the cursor. <code>startLiteralCount</code> gives
     * the number of literals to the start of the deleted range, end
     * gives the ending location to adjust from, direction gives
     * the direction relative to <code>end</code> to position the
     * cursor from.
     */
    private void repositionCursor(int startLiteralCount, int end,
                                  int direction) {
        int endLiteralCount = getLiteralCountTo(end);

        if (endLiteralCount != end) {
            // NOTE: the loop intentionally grows its own bound: every
            // literal encountered pushes the target position one further
            // to the right, so the caret lands on the same logical
            // (non-literal) position it had before the text was reset.
            end -= startLiteralCount;
            for (int counter = 0; counter < end; counter++) {
                if (isLiteral(counter)) {
                    end++;
                }
            }
        }
        repositionCursor(end,  1 /*direction*/);
    }
/**
* Returns the character from the mask that has been buffered
* at <code>index</code>.
*/
char getBufferedChar(int index) {
if (isValidMask()) {
if (string != null && index < string.length()) {
return string.charAt(index);
}
}
return (char)0;
}
    /**
     * Returns true if the current mask is valid, i.e. an
     * AttributedCharacterIterator has been successfully installed by
     * <code>updateMask</code>.
     */
    boolean isValidMask() {
        return validMask;
    }
/**
* Returns true if <code>attributes</code> is null or empty.
*/
boolean isLiteral(Map attributes) {
return ((attributes == null) || attributes.size() == 0);
}
    /**
     * Updates the internal bitset from <code>iterator</code>. This will
     * set <code>validMask</code> to true if <code>iterator</code> is
     * non-null. A set bit in <code>literalMask</code> marks a literal
     * position; a clear bit marks an editable field position.
     */
    private void updateMask(AttributedCharacterIterator iterator) {
        if (iterator != null) {
            validMask = true;
            this.iterator = iterator;

            // Update the literal mask
            if (literalMask == null) {
                literalMask = new BitSet();
            }
            else {
                // Clear all previously set bits before rebuilding.
                for (int counter = literalMask.length() - 1; counter >= 0;
                     counter--) {
                    literalMask.clear(counter);
                }
            }

            // Walk the iterator one attribute run at a time; each run is
            // either entirely literal or entirely a field.
            iterator.first();
            while (iterator.current() != CharacterIterator.DONE) {
                Map attributes = iterator.getAttributes();
                boolean set = isLiteral(attributes);
                int start = iterator.getIndex();
                int end = iterator.getRunLimit();

                while (start < end) {
                    if (set) {
                        literalMask.set(start);
                    }
                    else {
                        literalMask.clear(start);
                    }
                    start++;
                }
                // Advance to the next run (start == end here).
                iterator.setIndex(start);
            }
        }
    }
    /**
     * Returns true if <code>field</code> is non-null.
     * Subclasses that wish to allow incrementing to happen outside of
     * the known fields will need to override this.
     */
    boolean canIncrement(Object field, int cursorPosition) {
        return (field != null);
    }
    /**
     * Selects the <code>count</code>th occurrence of the field
     * <code>f</code> in the text component, where <code>f</code> is an
     * AttributedCharacterIterator.Attribute. Does nothing if no iterator
     * is installed or <code>f</code> is not an Attribute.
     */
    void selectField(Object f, int count) {
        AttributedCharacterIterator iterator = getIterator();

        if (iterator != null &&
                        (f instanceof AttributedCharacterIterator.Attribute)) {
            AttributedCharacterIterator.Attribute field =
                               (AttributedCharacterIterator.Attribute)f;

            iterator.first();
            while (iterator.current() != CharacterIterator.DONE) {
                // Skip forward to the next run carrying the field
                // (empty-bodied while: next() does the advancing).
                while (iterator.getAttribute(field) == null &&
                       iterator.next() != CharacterIterator.DONE);
                if (iterator.current() != CharacterIterator.DONE) {
                    int limit = iterator.getRunLimit(field);

                    if (--count <= 0) {
                        getFormattedTextField().select(iterator.getIndex(),
                                                       limit);
                        break;
                    }
                    iterator.setIndex(limit);
                    iterator.next();
                }
            }
        }
    }
    /**
     * Returns the field that will be adjusted by adjustValue.
     * This base implementation returns null (no adjustable field);
     * subclasses supporting incrementing override it.
     */
    Object getAdjustField(int start, Map attributes) {
        return null;
    }
    /**
     * Returns the number of occurrences of <code>f</code> before
     * the location <code>start</code> in the current
     * <code>AttributedCharacterIterator</code>. Returns 0 if no iterator
     * is installed or <code>f</code> is not an Attribute.
     */
    private int getFieldTypeCountTo(Object f, int start) {
        AttributedCharacterIterator iterator = getIterator();
        int count = 0;

        if (iterator != null &&
                    (f instanceof AttributedCharacterIterator.Attribute)) {
            AttributedCharacterIterator.Attribute field =
                               (AttributedCharacterIterator.Attribute)f;

            iterator.first();
            while (iterator.getIndex() < start) {
                // Skip to the next run carrying the field
                // (empty-bodied while: next() does the advancing).
                while (iterator.getAttribute(field) == null &&
                       iterator.next() != CharacterIterator.DONE);
                if (iterator.current() != CharacterIterator.DONE) {
                    iterator.setIndex(iterator.getRunLimit(field));
                    iterator.next();
                    count++;
                }
                else {
                    break;
                }
            }
        }
        return count;
    }
    /**
     * Subclasses supporting incrementing must override this to handle
     * the actual incrementing. <code>value</code> is the current value,
     * <code>attributes</code> gives the field the cursor is in (may be
     * null depending upon <code>canIncrement</code>) and
     * <code>direction</code> is the amount to increment by.
     * This base implementation returns null (no adjustment performed).
     */
    Object adjustValue(Object value, Map attributes, Object field,
                           int direction) throws
                                  BadLocationException, ParseException {
        return null;
    }
    /**
     * Returns false, indicating InternationalFormatter does not allow
     * incrementing of the value. Subclasses that wish to support
     * incrementing/decrementing the value should override this and
     * return true. Subclasses should also override
     * <code>adjustValue</code>.
     */
    boolean getSupportsIncrement() {
        return false;
    }
    /**
     * Resets the value of the JFormattedTextField to be
     * <code>value</code>. The document is rewritten wholesale with
     * <code>ignoreDocumentMutate</code> set so our own DocumentFilter
     * does not re-filter the programmatic edit.
     */
    void resetValue(Object value) throws BadLocationException, ParseException {
        Document doc = getFormattedTextField().getDocument();
        String string = valueToString(value);

        try {
            ignoreDocumentMutate = true;
            doc.remove(0, doc.getLength());
            doc.insertString(0, string, null);
        } finally {
            // Always restore the flag, even if the document edit throws.
            ignoreDocumentMutate = false;
        }
        updateValue(value);
    }
    /**
     * Subclassed to update the internal representation of the mask after
     * the default read operation has completed. The mask is transient
     * state derived from the value, so it must be rebuilt on
     * deserialization.
     */
    private void readObject(ObjectInputStream s)
        throws IOException, ClassNotFoundException {
        s.defaultReadObject();
        updateMaskIfNecessary();
    }
    /**
     * Overriden to return an instance of <code>ExtendedReplaceHolder</code>.
     * The holder is lazily created here and then populated by supers
     * implementation, which reuses the cached <code>replaceHolder</code>.
     */
    ReplaceHolder getReplaceHolder(DocumentFilter.FilterBypass fb, int offset,
                                   int length, String text,
                                   AttributeSet attrs) {
        if (replaceHolder == null) {
            replaceHolder = new ExtendedReplaceHolder();
        }
        return super.getReplaceHolder(fb, offset, length, text, attrs);
    }
    /**
     * As InternationalFormatter replaces the complete text on every edit,
     * ExtendedReplaceHolder keeps track of the offset and length passed
     * into canReplace.
     */
    static class ExtendedReplaceHolder extends ReplaceHolder {
        /** Offset of the insert/remove. This may differ from offset in
         * that if !allowsInvalid the text is replaced on every edit. */
        int endOffset;
        /** Length of the text. This may differ from text.length in
         * that if !allowsInvalid the text is replaced on every edit. */
        int endTextLength;

        /**
         * Resets the region to delete to be the complete document and
         * the text from invoking valueToString on the current value.
         */
        void resetFromValue(InternationalFormatter formatter) {
            // Need to reset the complete string as Format's result can
            // be completely different.
            offset = 0;
            try {
                text = formatter.valueToString(value);
            } catch (ParseException pe) {
                // Should never happen, otherwise canReplace would have
                // returned false.
                text = "";
            }
            length = fb.getDocument().getLength();
        }
    }
    /**
     * IncrementAction is used to increment the value by a certain amount.
     * It calls into <code>adjustValue</code> to handle the actual
     * incrementing of the value. If no valid edit results, the field's
     * <code>invalidEdit</code> notification (typically a beep) is fired.
     */
    private class IncrementAction extends AbstractAction {
        private int direction;

        IncrementAction(String name, int direction) {
            super(name);
            this.direction = direction;
        }

        public void actionPerformed(ActionEvent ae) {

            if (getFormattedTextField().isEditable()) {
                if (getAllowsInvalid()) {
                    // This will work if the currently edited value is valid.
                    updateMask();
                }

                boolean validEdit = false;

                if (isValidMask()) {
                    int start = getFormattedTextField().getSelectionStart();

                    if (start != -1) {
                        AttributedCharacterIterator iterator = getIterator();

                        iterator.setIndex(start);

                        Map attributes = iterator.getAttributes();
                        Object field = getAdjustField(start, attributes);

                        if (canIncrement(field, start)) {
                            try {
                                Object value = stringToValue(
                                        getFormattedTextField().getText());
                                int fieldTypeCount = getFieldTypeCountTo(
                                        field, start);

                                value = adjustValue(value, attributes,
                                        field, direction);

                                if (value != null && isValidValue(value, false)) {
                                    resetValue(value);
                                    updateMask();

                                    if (isValidMask()) {
                                        // Restore the selection to the same
                                        // logical field after the rewrite.
                                        selectField(field, fieldTypeCount);
                                    }
                                    validEdit = true;
                                }
                            }
                            // Swallowed intentionally: failure to parse or
                            // adjust simply leaves validEdit false and
                            // triggers invalidEdit() below.
                            catch (ParseException pe) { }
                            catch (BadLocationException ble) { }
                        }
                    }
                }
                if (!validEdit) {
                    invalidEdit();
                }
            }
        }
    }
}
|
googleapis/google-cloud-java | 36,764 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/RestoreAgentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/agent.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* The request message for
* [Agents.RestoreAgent][google.cloud.dialogflow.v2beta1.Agents.RestoreAgent].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.RestoreAgentRequest}
*/
public final class RestoreAgentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.RestoreAgentRequest)
RestoreAgentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use RestoreAgentRequest.newBuilder() to construct.
private RestoreAgentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
  // Default-instance construction: initializes the string field to empty;
  // the agent oneof remains in its AGENT_NOT_SET state.
  private RestoreAgentRequest() {
    parent_ = "";
  }
  // Used reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RestoreAgentRequest();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_RestoreAgentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_RestoreAgentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.class,
com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.Builder.class);
}
private int agentCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object agent_;
  // Discriminator for the `agent` oneof: identifies which (if any) of the
  // oneof's fields is currently set. Values mirror the field numbers.
  public enum AgentCase
      implements
          com.google.protobuf.Internal.EnumLite,
          com.google.protobuf.AbstractMessage.InternalOneOfEnum {
    AGENT_URI(2),
    AGENT_CONTENT(3),
    AGENT_NOT_SET(0);
    private final int value;

    private AgentCase(int value) {
      this.value = value;
    }

    /**
     * @param value The number of the enum to look for.
     * @return The enum associated with the given number.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static AgentCase valueOf(int value) {
      return forNumber(value);
    }

    // Returns the case for a field number, or null for unknown numbers
    // (unlike valueOf, no exception is thrown).
    public static AgentCase forNumber(int value) {
      switch (value) {
        case 2:
          return AGENT_URI;
        case 3:
          return AGENT_CONTENT;
        case 0:
          return AGENT_NOT_SET;
        default:
          return null;
      }
    }

    public int getNumber() {
      return this.value;
    }
  };
  // Returns which field of the `agent` oneof is set (never null:
  // agentCase_ only ever holds 0, 2 or 3).
  public AgentCase getAgentCase() {
    return AgentCase.forNumber(agentCase_);
  }
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The project that the agent to restore is associated with.
* Format: `projects/<Project ID>` or
* `projects/<Project ID>/locations/<Location ID>`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int AGENT_URI_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return Whether the agentUri field is set.
*/
public boolean hasAgentUri() {
return agentCase_ == 2;
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The agentUri.
*/
public java.lang.String getAgentUri() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (agentCase_ == 2) {
agent_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* The URI to a Google Cloud Storage file containing the agent to restore.
* Note: The URI must start with "gs://".
*
* Dialogflow performs a read operation for the Cloud Storage object
* on the caller's behalf, so your request authentication must
* have read permissions for the object. For more information, see
* [Dialogflow access
* control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage).
* </pre>
*
* <code>string agent_uri = 2;</code>
*
* @return The bytes for agentUri.
*/
public com.google.protobuf.ByteString getAgentUriBytes() {
java.lang.Object ref = "";
if (agentCase_ == 2) {
ref = agent_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (agentCase_ == 2) {
agent_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int AGENT_CONTENT_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return Whether the agentContent field is set.
*/
@java.lang.Override
public boolean hasAgentContent() {
return agentCase_ == 3;
}
/**
*
*
* <pre>
* Zip compressed raw byte content for agent.
* </pre>
*
* <code>bytes agent_content = 3;</code>
*
* @return The agentContent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getAgentContent() {
if (agentCase_ == 3) {
return (com.google.protobuf.ByteString) agent_;
}
return com.google.protobuf.ByteString.EMPTY;
}
private byte memoizedIsInitialized = -1;
  // All fields of this message are proto3 optional/oneof, so the message
  // is always initialized; the result is memoized (-1 = not yet computed).
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order; empty `parent` and an
  // unset oneof are skipped per proto3 wire-format rules.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (agentCase_ == 2) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, agent_);
    }
    if (agentCase_ == 3) {
      output.writeBytes(3, (com.google.protobuf.ByteString) agent_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize; -1 = not computed) the number
  // of bytes writeTo will emit. Mirrors writeTo's skip-if-unset logic.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (agentCase_ == 2) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, agent_);
    }
    if (agentCase_ == 3) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBytesSize(
              3, (com.google.protobuf.ByteString) agent_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality: parent, the oneof case, the set oneof payload,
  // and unknown fields must all match.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest other =
        (com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    // Cases must match before the payloads are compared below.
    if (!getAgentCase().equals(other.getAgentCase())) return false;
    switch (agentCase_) {
      case 2:
        if (!getAgentUri().equals(other.getAgentUri())) return false;
        break;
      case 3:
        if (!getAgentContent().equals(other.getAgentContent())) return false;
        break;
      case 0:
      default:
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Memoized hash (0 = not yet computed), mixing field numbers and field
  // values with the standard generated-code prime multipliers; consistent
  // with equals above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    switch (agentCase_) {
      case 2:
        hash = (37 * hash) + AGENT_URI_FIELD_NUMBER;
        hash = (53 * hash) + getAgentUri().hashCode();
        break;
      case 3:
        hash = (37 * hash) + AGENT_CONTENT_FIELD_NUMBER;
        hash = (53 * hash) + getAgentContent().hashCode();
        break;
      case 0:
      default:
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [Agents.RestoreAgent][google.cloud.dialogflow.v2beta1.Agents.RestoreAgent].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.RestoreAgentRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.RestoreAgentRequest)
com.google.cloud.dialogflow.v2beta1.RestoreAgentRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_RestoreAgentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_RestoreAgentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.class,
com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    // Resets the builder to default state: empty parent, cleared
    // presence bits, and the agent oneof unset.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      agentCase_ = 0;
      agent_ = null;
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.AgentProto
.internal_static_google_cloud_dialogflow_v2beta1_RestoreAgentRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest build() {
com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    // Builds without required-field validation (none exist here), copying
    // regular fields only when their presence bit is set, then the oneof.
    @java.lang.Override
    public com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest buildPartial() {
      com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest result =
          new com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      buildPartialOneofs(result);
      onBuilt();
      return result;
    }
    // Copies non-oneof fields guarded by presence bits (bit 0 = parent).
    private void buildPartial0(com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
    }
    // Copies the oneof discriminator and its payload as-is.
    private void buildPartialOneofs(
        com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest result) {
      result.agentCase_ = agentCase_;
      result.agent_ = this.agent_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another message into this builder: non-empty scalar fields
    // overwrite, and a set oneof in `other` replaces whatever case this
    // builder currently holds.
    public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest other) {
      if (other == com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      switch (other.getAgentCase()) {
        case AGENT_URI:
          {
            // Copy the raw field to avoid forcing a ByteString->String
            // conversion; the case tag is set alongside it.
            agentCase_ = 2;
            agent_ = other.agent_;
            onChanged();
            break;
          }
        case AGENT_CONTENT:
          {
            setAgentContent(other.getAgentContent());
            break;
          }
        case AGENT_NOT_SET:
          {
            break;
          }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
agentCase_ = 2;
agent_ = s;
break;
} // case 18
case 26:
{
agent_ = input.readBytes();
agentCase_ = 3;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int agentCase_ = 0;
private java.lang.Object agent_;
public AgentCase getAgentCase() {
return AgentCase.forNumber(agentCase_);
}
    // Clears the agent oneof back to AGENT_NOT_SET and drops its payload.
    public Builder clearAgent() {
      agentCase_ = 0;
      agent_ = null;
      onChanged();
      return this;
    }
// Presence bits for optional builder fields; bit 0x00000001 tracks `parent`.
private int bitField0_;
private java.lang.Object parent_ = "";

/**
 * Required. The project that the agent to restore is associated with.
 * Format: {@code projects/<Project ID>} or
 * {@code projects/<Project ID>/locations/<Location ID>}.
 *
 * <p>Stored as either a String or a ByteString; a ByteString is lazily
 * decoded to UTF-8 and cached back into {@code parent_}.
 *
 * @return The parent.
 */
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * Byte view of {@link #getParent()}; lazily encodes a cached String to a
 * ByteString and caches the encoded form.
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * Sets the required {@code parent} resource name.
 *
 * @param value The parent to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}

/**
 * Resets {@code parent} to the message default and clears its presence bit.
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
  parent_ = getDefaultInstance().getParent();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}

/**
 * Sets {@code parent} from raw bytes, validating that they are well-formed UTF-8.
 *
 * @param value The bytes for parent to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  parent_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * The URI to a Google Cloud Storage file containing the agent to restore
 * (must start with {@code "gs://"}). Member 2 of the {@code agent} oneof.
 *
 * @return Whether the agentUri field is set.
 */
@java.lang.Override
public boolean hasAgentUri() {
  return agentCase_ == 2;
}

/**
 * Returns the {@code agent_uri} oneof member, or {@code ""} if a different
 * member (or none) is set. A ByteString value is lazily decoded to UTF-8 and,
 * if the oneof is still on case 2, cached back into {@code agent_}.
 *
 * @return The agentUri.
 */
@java.lang.Override
public java.lang.String getAgentUri() {
  java.lang.Object ref = "";
  if (agentCase_ == 2) {
    ref = agent_;
  }
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    if (agentCase_ == 2) {
      agent_ = s;
    }
    return s;
  } else {
    return (java.lang.String) ref;
  }
}

/**
 * Byte view of {@link #getAgentUri()}; lazily encodes a cached String and
 * caches the encoded form while the oneof remains on case 2.
 *
 * @return The bytes for agentUri.
 */
@java.lang.Override
public com.google.protobuf.ByteString getAgentUriBytes() {
  java.lang.Object ref = "";
  if (agentCase_ == 2) {
    ref = agent_;
  }
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    if (agentCase_ == 2) {
      agent_ = b;
    }
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

/**
 * Sets the {@code agent_uri} oneof member (switches the oneof to case 2).
 *
 * @param value The agentUri to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setAgentUri(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  agentCase_ = 2;
  agent_ = value;
  onChanged();
  return this;
}

/**
 * Clears {@code agent_uri} only if it is the currently-set oneof member;
 * other members are left untouched.
 *
 * @return This builder for chaining.
 */
public Builder clearAgentUri() {
  if (agentCase_ == 2) {
    agentCase_ = 0;
    agent_ = null;
    onChanged();
  }
  return this;
}

/**
 * Sets {@code agent_uri} from raw bytes after validating UTF-8
 * (switches the oneof to case 2).
 *
 * @param value The bytes for agentUri to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setAgentUriBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  agentCase_ = 2;
  agent_ = value;
  onChanged();
  return this;
}
/**
 * Zip-compressed raw byte content for the agent. Member 3 of the
 * {@code agent} oneof.
 *
 * @return Whether the agentContent field is set.
 */
public boolean hasAgentContent() {
  return agentCase_ == 3;
}

/**
 * Returns the {@code agent_content} oneof member, or
 * {@link com.google.protobuf.ByteString#EMPTY} if a different member
 * (or none) is set.
 *
 * @return The agentContent.
 */
public com.google.protobuf.ByteString getAgentContent() {
  if (agentCase_ == 3) {
    return (com.google.protobuf.ByteString) agent_;
  }
  return com.google.protobuf.ByteString.EMPTY;
}

/**
 * Sets the {@code agent_content} oneof member (switches the oneof to case 3).
 *
 * @param value The agentContent to set; must be non-null.
 * @return This builder for chaining.
 */
public Builder setAgentContent(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  agentCase_ = 3;
  agent_ = value;
  onChanged();
  return this;
}

/**
 * Clears {@code agent_content} only if it is the currently-set oneof member.
 *
 * @return This builder for chaining.
 */
public Builder clearAgentContent() {
  if (agentCase_ == 3) {
    agentCase_ = 0;
    agent_ = null;
    onChanged();
  }
  return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}

// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.RestoreAgentRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.RestoreAgentRequest)

// Singleton default (all-fields-unset) instance shared by every caller.
private static final com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest();
}

/** Returns the shared immutable default instance of this message. */
public static com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parser that routes wire bytes through a fresh Builder; parse failures are
// rethrown as InvalidProtocolBufferException carrying the partial message.
private static final com.google.protobuf.Parser<RestoreAgentRequest> PARSER =
    new com.google.protobuf.AbstractParser<RestoreAgentRequest>() {
      @java.lang.Override
      public RestoreAgentRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** Static accessor for the message parser. */
public static com.google.protobuf.Parser<RestoreAgentRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<RestoreAgentRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.RestoreAgentRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== NOTE(review): file boundary artifact. The generated class above ends here;
// ==== the code below originates from a different generated source:
// ==== googleapis/google-cloud-java, java-retail/proto-google-cloud-retail-v2/
// ====   src/main/java/com/google/cloud/retail/v2/ListServingConfigsResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/serving_config_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Response for ListServingConfigs method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.ListServingConfigsResponse}
*/
public final class ListServingConfigsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.ListServingConfigsResponse)
ListServingConfigsResponseOrBuilder {
private static final long serialVersionUID = 0L;

// Use ListServingConfigsResponse.newBuilder() to construct.
private ListServingConfigsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor initializes fields to their proto defaults
// (empty list / empty string).
private ListServingConfigsResponse() {
  servingConfigs_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

// Runtime hook used by GeneratedMessageV3 to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListServingConfigsResponse();
}
/** Descriptor for this message type, defined in ServingConfigServiceProto. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.retail.v2.ServingConfigServiceProto
      .internal_static_google_cloud_retail_v2_ListServingConfigsResponse_descriptor;
}

// Wires reflective field access for this message and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.retail.v2.ServingConfigServiceProto
      .internal_static_google_cloud_retail_v2_ListServingConfigsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.retail.v2.ListServingConfigsResponse.class,
          com.google.cloud.retail.v2.ListServingConfigsResponse.Builder.class);
}
public static final int SERVING_CONFIGS_FIELD_NUMBER = 1;

// All the ServingConfigs for a given catalog.
// repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.retail.v2.ServingConfig> servingConfigs_;

/** Returns the (immutable) list of serving configs. */
@java.lang.Override
public java.util.List<com.google.cloud.retail.v2.ServingConfig> getServingConfigsList() {
  return servingConfigs_;
}

/** Same list viewed through the OrBuilder interface. */
@java.lang.Override
public java.util.List<? extends com.google.cloud.retail.v2.ServingConfigOrBuilder>
    getServingConfigsOrBuilderList() {
  return servingConfigs_;
}

/** Number of serving configs in this response page. */
@java.lang.Override
public int getServingConfigsCount() {
  return servingConfigs_.size();
}

/** Serving config at {@code index}. */
@java.lang.Override
public com.google.cloud.retail.v2.ServingConfig getServingConfigs(int index) {
  return servingConfigs_.get(index);
}

/** OrBuilder view of the serving config at {@code index}. */
@java.lang.Override
public com.google.cloud.retail.v2.ServingConfigOrBuilder getServingConfigsOrBuilder(int index) {
  return servingConfigs_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Pagination token; absent/empty indicates the last page.
// Stored as String or ByteString; lazily converted and cached on access.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 * Pagination token; if not returned, this is the last page.
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}

/**
 * Byte view of {@link #getNextPageToken()}; lazily encodes and caches.
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state initialization flag: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;

// This message has no required fields, so it is always initialized;
// the result is cached on first call.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes field 1 (each serving config) and field 2 (next_page_token,
// skipped when empty), then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < servingConfigs_.size(); i++) {
    output.writeMessage(1, servingConfigs_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes in memoizedSize) the wire size matching writeTo.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < servingConfigs_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, servingConfigs_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-wise equality over serving_configs, next_page_token, and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.retail.v2.ListServingConfigsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.retail.v2.ListServingConfigsResponse other =
      (com.google.cloud.retail.v2.ListServingConfigsResponse) obj;
  if (!getServingConfigsList().equals(other.getServingConfigsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash consistent with equals(); memoized after the first computation.
// The repeated field contributes only when non-empty.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getServingConfigsCount() > 0) {
    hash = (37 * hash) + SERVING_CONFIGS_FIELD_NUMBER;
    hash = (53 * hash) + getServingConfigsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: each overload accepts one input kind
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream — optionally
// with an ExtensionRegistryLite) and delegates to PARSER or the
// GeneratedMessageV3 IO helpers.
public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.retail.v2.ListServingConfigsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2.ListServingConfigsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** New empty builder (derived from the default instance). */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** New builder pre-populated with {@code prototype}'s fields. */
public static Builder newBuilder(
    com.google.cloud.retail.v2.ListServingConfigsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

// The default instance yields a fresh empty builder; any other instance
// yields a builder seeded with its own field values.
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Response for ListServingConfigs method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.ListServingConfigsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.ListServingConfigsResponse)
com.google.cloud.retail.v2.ListServingConfigsResponseOrBuilder {
/** Descriptor for the message type this Builder constructs. */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.retail.v2.ServingConfigServiceProto
      .internal_static_google_cloud_retail_v2_ListServingConfigsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.retail.v2.ServingConfigServiceProto
      .internal_static_google_cloud_retail_v2_ListServingConfigsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.retail.v2.ListServingConfigsResponse.class,
          com.google.cloud.retail.v2.ListServingConfigsResponse.Builder.class);
}

// Construct using com.google.cloud.retail.v2.ListServingConfigsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
// Resets all fields to proto defaults; when a nested field builder exists,
// it is cleared instead of replacing the list reference.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (servingConfigsBuilder_ == null) {
    servingConfigs_ = java.util.Collections.emptyList();
  } else {
    servingConfigs_ = null;
    servingConfigsBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.retail.v2.ServingConfigServiceProto
      .internal_static_google_cloud_retail_v2_ListServingConfigsResponse_descriptor;
}

@java.lang.Override
public com.google.cloud.retail.v2.ListServingConfigsResponse getDefaultInstanceForType() {
  return com.google.cloud.retail.v2.ListServingConfigsResponse.getDefaultInstance();
}
// build() = buildPartial() + initialization check (always passes here,
// since the message has no required fields).
@java.lang.Override
public com.google.cloud.retail.v2.ListServingConfigsResponse build() {
  com.google.cloud.retail.v2.ListServingConfigsResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.retail.v2.ListServingConfigsResponse buildPartial() {
  com.google.cloud.retail.v2.ListServingConfigsResponse result =
      new com.google.cloud.retail.v2.ListServingConfigsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Transfers serving_configs: freezes the builder-held list (bit 0x1) into an
// unmodifiable list, or takes the built list from the nested field builder.
private void buildPartialRepeatedFields(
    com.google.cloud.retail.v2.ListServingConfigsResponse result) {
  if (servingConfigsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      servingConfigs_ = java.util.Collections.unmodifiableList(servingConfigs_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.servingConfigs_ = servingConfigs_;
  } else {
    result.servingConfigs_ = servingConfigsBuilder_.build();
  }
}

// Transfers scalar fields guarded by presence bits (0x2 = next_page_token).
private void buildPartial0(com.google.cloud.retail.v2.ListServingConfigsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
// Reflection-based mutation methods are delegated unchanged to the superclass.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dispatches to the typed merge when possible, else falls back to
// reflective field-by-field merging.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.retail.v2.ListServingConfigsResponse) {
    return mergeFrom((com.google.cloud.retail.v2.ListServingConfigsResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Typed merge: appends other's serving_configs (adopting other's list
// wholesale when ours is empty — the mutable bit stays clear so a later
// mutation copies first), overwrites next_page_token when non-empty,
// and merges unknown fields.
public Builder mergeFrom(com.google.cloud.retail.v2.ListServingConfigsResponse other) {
  if (other == com.google.cloud.retail.v2.ListServingConfigsResponse.getDefaultInstance())
    return this;
  if (servingConfigsBuilder_ == null) {
    if (!other.servingConfigs_.isEmpty()) {
      if (servingConfigs_.isEmpty()) {
        servingConfigs_ = other.servingConfigs_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureServingConfigsIsMutable();
        servingConfigs_.addAll(other.servingConfigs_);
      }
      onChanged();
    }
  } else {
    if (!other.servingConfigs_.isEmpty()) {
      if (servingConfigsBuilder_.isEmpty()) {
        // Field builder is empty: drop it and adopt other's list directly,
        // re-creating the builder only when alwaysUseFieldBuilders is set.
        servingConfigsBuilder_.dispose();
        servingConfigsBuilder_ = null;
        servingConfigs_ = other.servingConfigs_;
        bitField0_ = (bitField0_ & ~0x00000001);
        servingConfigsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getServingConfigsFieldBuilder()
                : null;
      } else {
        servingConfigsBuilder_.addAllMessages(other.servingConfigs_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
// No required fields, so the builder is always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Wire-format merge: tag-dispatch loop reading field 1 (serving_configs
// submessages, tag 10) and field 2 (next_page_token string, tag 18);
// anything else goes to the unknown-field handler. onChanged() fires in
// the finally block even when parsing aborts.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            com.google.cloud.retail.v2.ServingConfig m =
                input.readMessage(
                    com.google.cloud.retail.v2.ServingConfig.parser(), extensionRegistry);
            if (servingConfigsBuilder_ == null) {
              ensureServingConfigsIsMutable();
              servingConfigs_.add(m);
            } else {
              servingConfigsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Presence/mutability bits; bit 0x00000001 means servingConfigs_ is a
// builder-owned mutable ArrayList, bit 0x00000002 tracks nextPageToken_.
private int bitField0_;

private java.util.List<com.google.cloud.retail.v2.ServingConfig> servingConfigs_ =
    java.util.Collections.emptyList();

// Copy-on-write: replaces a shared/immutable list with a mutable copy
// the first time a mutation is requested.
private void ensureServingConfigsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    servingConfigs_ =
        new java.util.ArrayList<com.google.cloud.retail.v2.ServingConfig>(servingConfigs_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created nested field builder; while null, mutations operate on
// servingConfigs_ directly.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.retail.v2.ServingConfig,
        com.google.cloud.retail.v2.ServingConfig.Builder,
        com.google.cloud.retail.v2.ServingConfigOrBuilder>
    servingConfigsBuilder_;
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public java.util.List<com.google.cloud.retail.v2.ServingConfig> getServingConfigsList() {
if (servingConfigsBuilder_ == null) {
return java.util.Collections.unmodifiableList(servingConfigs_);
} else {
return servingConfigsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public int getServingConfigsCount() {
if (servingConfigsBuilder_ == null) {
return servingConfigs_.size();
} else {
return servingConfigsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public com.google.cloud.retail.v2.ServingConfig getServingConfigs(int index) {
if (servingConfigsBuilder_ == null) {
return servingConfigs_.get(index);
} else {
return servingConfigsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public Builder setServingConfigs(int index, com.google.cloud.retail.v2.ServingConfig value) {
if (servingConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServingConfigsIsMutable();
servingConfigs_.set(index, value);
onChanged();
} else {
servingConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public Builder setServingConfigs(
int index, com.google.cloud.retail.v2.ServingConfig.Builder builderForValue) {
if (servingConfigsBuilder_ == null) {
ensureServingConfigsIsMutable();
servingConfigs_.set(index, builderForValue.build());
onChanged();
} else {
servingConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public Builder addServingConfigs(com.google.cloud.retail.v2.ServingConfig value) {
if (servingConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServingConfigsIsMutable();
servingConfigs_.add(value);
onChanged();
} else {
servingConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public Builder addServingConfigs(int index, com.google.cloud.retail.v2.ServingConfig value) {
if (servingConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServingConfigsIsMutable();
servingConfigs_.add(index, value);
onChanged();
} else {
servingConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public Builder addServingConfigs(
com.google.cloud.retail.v2.ServingConfig.Builder builderForValue) {
if (servingConfigsBuilder_ == null) {
ensureServingConfigsIsMutable();
servingConfigs_.add(builderForValue.build());
onChanged();
} else {
servingConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the ServingConfigs for a given catalog.
* </pre>
*
* <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
*/
public Builder addServingConfigs(
int index, com.google.cloud.retail.v2.ServingConfig.Builder builderForValue) {
if (servingConfigsBuilder_ == null) {
ensureServingConfigsIsMutable();
servingConfigs_.add(index, builderForValue.build());
onChanged();
} else {
servingConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public Builder addAllServingConfigs(
    java.lang.Iterable<? extends com.google.cloud.retail.v2.ServingConfig> values) {
  if (servingConfigsBuilder_ == null) {
    ensureServingConfigsIsMutable();
    // Bulk-append via the protobuf helper, which also null-checks each element.
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, servingConfigs_);
    onChanged();
  } else {
    servingConfigsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public Builder clearServingConfigs() {
  if (servingConfigsBuilder_ == null) {
    // Reset to the shared empty list and clear the has-bit for this field.
    servingConfigs_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    servingConfigsBuilder_.clear();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public Builder removeServingConfigs(int index) {
  // Removes the element at the given position from whichever store owns the field.
  if (servingConfigsBuilder_ == null) {
    ensureServingConfigsIsMutable();
    servingConfigs_.remove(index);
    onChanged();
  } else {
    servingConfigsBuilder_.remove(index);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public com.google.cloud.retail.v2.ServingConfig.Builder getServingConfigsBuilder(int index) {
  // Forces creation of the field builder so a mutable sub-builder can be handed out.
  return getServingConfigsFieldBuilder().getBuilder(index);
}
/**
 * Reads the serving config at {@code index} from whichever store currently owns the
 * repeated field, as a read-only {@code OrBuilder} view.
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public com.google.cloud.retail.v2.ServingConfigOrBuilder getServingConfigsOrBuilder(int index) {
  return servingConfigsBuilder_ != null
      ? servingConfigsBuilder_.getMessageOrBuilder(index)
      : servingConfigs_.get(index);
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public java.util.List<? extends com.google.cloud.retail.v2.ServingConfigOrBuilder>
    getServingConfigsOrBuilderList() {
  if (servingConfigsBuilder_ != null) {
    return servingConfigsBuilder_.getMessageOrBuilderList();
  } else {
    // Wrap the local list so callers cannot mutate the builder's backing store.
    return java.util.Collections.unmodifiableList(servingConfigs_);
  }
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public com.google.cloud.retail.v2.ServingConfig.Builder addServingConfigsBuilder() {
  // Appends a default-initialized element and returns its mutable sub-builder.
  return getServingConfigsFieldBuilder()
      .addBuilder(com.google.cloud.retail.v2.ServingConfig.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public com.google.cloud.retail.v2.ServingConfig.Builder addServingConfigsBuilder(int index) {
  // Inserts a default-initialized element at the given position and returns its sub-builder.
  return getServingConfigsFieldBuilder()
      .addBuilder(index, com.google.cloud.retail.v2.ServingConfig.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * All the ServingConfigs for a given catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.retail.v2.ServingConfig serving_configs = 1;</code>
 */
public java.util.List<com.google.cloud.retail.v2.ServingConfig.Builder>
    getServingConfigsBuilderList() {
  // Forces creation of the field builder; the returned builders write through to the field.
  return getServingConfigsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for serving_configs. Once created, the
// builder owns the data and the plain list reference is released (set to null).
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.retail.v2.ServingConfig,
        com.google.cloud.retail.v2.ServingConfig.Builder,
        com.google.cloud.retail.v2.ServingConfigOrBuilder>
    getServingConfigsFieldBuilder() {
  if (servingConfigsBuilder_ == null) {
    servingConfigsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2.ServingConfig,
            com.google.cloud.retail.v2.ServingConfig.Builder,
            com.google.cloud.retail.v2.ServingConfigOrBuilder>(
            servingConfigs_,
            // Has-bit doubles as the "list is mutable" flag for the new builder.
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    servingConfigs_ = null;
  }
  return servingConfigsBuilder_;
}
// Holds either a String or a ByteString; decoded to String lazily on first access.
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Pagination token, if not returned indicates the last page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Stored as ByteString: decode once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * Pagination token, if not returned indicates the last page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // Stored as String: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Pagination token, if not returned indicates the last page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Mark the field as set via its has-bit before notifying listeners.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Pagination token, if not returned indicates the last page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  // Restore the proto default value and clear the has-bit.
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Pagination token, if not returned indicates the last page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 string fields must hold valid UTF-8; reject malformed bytes up front.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Unknown-field handling is delegated entirely to the generated superclass.
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  // Merge (rather than replace) unknown fields via the superclass implementation.
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.ListServingConfigsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.ListServingConfigsResponse)
// Singleton default instance shared by all callers; initialized eagerly at class load.
private static final com.google.cloud.retail.v2.ListServingConfigsResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.retail.v2.ListServingConfigsResponse();
}
public static com.google.cloud.retail.v2.ListServingConfigsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any failure the partially built message is attached to the
// thrown InvalidProtocolBufferException so callers can inspect what was decoded.
private static final com.google.protobuf.Parser<ListServingConfigsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListServingConfigsResponse>() {
      @java.lang.Override
      public ListServingConfigsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // I/O problems are surfaced uniformly as protocol-buffer parse failures.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<ListServingConfigsResponse> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListServingConfigsResponse> getParserForType() {
  // Instance-level accessor required by the Message interface; same shared parser.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.retail.v2.ListServingConfigsResponse getDefaultInstanceForType() {
  // Instance-level accessor required by the Message interface; same singleton.
  return DEFAULT_INSTANCE;
}
}
|
apache/ignite-3 | 36,489 | modules/sql-engine/src/integrationTest/java/org/apache/ignite/internal/sql/engine/ItAggregatesTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.sql.engine;
import static org.apache.ignite.internal.catalog.CatalogService.DEFAULT_STORAGE_PROFILE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.apache.ignite.internal.lang.IgniteStringFormatter;
import org.apache.ignite.internal.sql.BaseSqlIntegrationTest;
import org.apache.ignite.internal.sql.engine.hint.IgniteHint;
import org.apache.ignite.internal.sql.engine.type.IgniteTypeSystem;
import org.apache.ignite.internal.sql.engine.util.HintUtils;
import org.apache.ignite.internal.sql.engine.util.QueryChecker;
import org.apache.ignite.internal.testframework.WithSystemProperty;
import org.apache.ignite.lang.IgniteException;
import org.apache.ignite.sql.SqlException;
import org.junit.jupiter.api.Assumptions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
/**
* Group of tests to verify aggregation functions.
*/
public class ItAggregatesTest extends BaseSqlIntegrationTest {
// Planner converter rules whose permutations are disabled per test run, so each
// aggregate strategy (map-reduce/colocated x hash/sort) gets exercised.
private static final String[] PERMUTATION_RULES = {
    "MapReduceHashAggregateConverterRule", "MapReduceSortAggregateConverterRule",
    "ColocatedHashAggregateConverterRule", "ColocatedSortAggregateConverterRule"
};
// Number of rows inserted into the TEST tables during setup.
private static final int ROWS = 103;
// most commonly used values: AVG results come back as DECIMAL with scale 16.
private static final BigDecimal ONE_WITH_SCALE_16 = new BigDecimal("1").setScale(16, RoundingMode.UNNECESSARY);
private static final BigDecimal ONE_AND_HALF_WITH_SCALE_16 = new BigDecimal("1.5").setScale(16, RoundingMode.UNNECESSARY);
private static final BigDecimal TWO_WITH_SCALE_16 = new BigDecimal("2").setScale(16, RoundingMode.UNNECESSARY);
/**
 * Creates and populates all tables shared by the tests in this class, then gathers
 * statistics so the planner has row-count estimates.
 */
@BeforeAll
static void initTestData() {
    createAndPopulateTable();
    sql("CREATE ZONE test_zone (replicas 2, partitions 10) storage profiles ['" + DEFAULT_STORAGE_PROFILE + "']");
    sql("CREATE TABLE test (id INT PRIMARY KEY, grp0 INT, grp1 INT, val0 INT, val1 INT) ZONE TEST_ZONE");
    sql("CREATE TABLE test_one_col_idx (pk INT PRIMARY KEY, col0 INT)");
    // val0 is always 1 and val1 always 2, so per-group SUM(val1) == 2 * SUM(val0).
    for (int i = 0; i < ROWS; i++) {
        sql("INSERT INTO test (id, grp0, grp1, val0, val1) VALUES (?, ?, ?, ?, ?)", i, i / 10, i / 100, 1, 2);
        sql("INSERT INTO test_one_col_idx (pk, col0) VALUES (?, ?)", i, i);
    }
    // Two tables colocated by val1, used by the colocated-aggregate tests.
    sql("CREATE TABLE t1_colo_val1(id INT, val0 VARCHAR, val1 VARCHAR, val2 VARCHAR, PRIMARY KEY(id, val1)) "
            + "COLOCATE BY (val1)");
    sql("CREATE TABLE t2_colo_va1(id INT, val0 VARCHAR, val1 VARCHAR, val2 VARCHAR, PRIMARY KEY(id, val1)) "
            + "COLOCATE BY (val1)");
    // val1 alternates between 'val0' and 'val1' -> 50 rows per colocation key.
    for (int i = 0; i < 100; i++) {
        sql("INSERT INTO t1_colo_val1 VALUES (?, ?, ?, ?)", i, "val" + i, "val" + i % 2, "val" + i);
    }
    sql("INSERT INTO t2_colo_va1 VALUES (0, 'val0', 'val0', 'val0'), (1, 'val1', 'val1', 'val1')");
    sql("CREATE TABLE test_a_b_s (id INTEGER PRIMARY KEY, a INTEGER, b INTEGER, s VARCHAR);");
    sql("INSERT INTO test_a_b_s VALUES (1, 11, 1, 'hello'), (2, 12, 2, 'world'), (3, 11, 3, NULL)");
    sql("INSERT INTO test_a_b_s VALUES (4, 11, 3, 'hello'), (5, 12, 2, 'world'), (6, 10, 5, 'ahello'), (7, 13, 6, 'world')");
    sql("CREATE TABLE test_str_int_real_dec "
            + "(id INTEGER PRIMARY KEY, str_col VARCHAR, int_col INTEGER, real_col REAL, dec_col DECIMAL)");
    // One nullable column of every numeric type, for the AVG tests.
    sql("CREATE TABLE IF NOT EXISTS numbers ("
            + "id INTEGER PRIMARY KEY, "
            + "tinyint_col TINYINT, "
            + "smallint_col SMALLINT, "
            + "int_col INTEGER, "
            + "bigint_col BIGINT, "
            + "float_col REAL, "
            + "double_col DOUBLE, "
            + "dec2_col DECIMAL(2), "
            + "dec4_2_col DECIMAL(4,2), "
            + "dec20_18_col DECIMAL(20,18), "
            + "dec10_2_col DECIMAL(10,2) "
            + ")");
    sql("CREATE TABLE IF NOT EXISTS not_null_numbers ("
            + "id INTEGER PRIMARY KEY, "
            + "int_col INTEGER NOT NULL, "
            + "dec4_2_col DECIMAL(4,2) NOT NULL"
            + ")");
    gatherStatistics();
}
/**
 * SUM with GROUP BY and a HAVING filter: every group of ten rows must satisfy
 * SUM(val1) == 2 * SUM(val0), matching how the data was inserted.
 */
@ParameterizedTest
@MethodSource("provideRules")
public void aggregateWithSumAndHaving(String[] rules) {
    String query = appendDisabledRules(
            "SELECT SUM(val0), SUM(val1), grp0 FROM TEST GROUP BY grp0 HAVING SUM(val1) > 10", rules);
    var rows = sql(query);
    assertEquals(ROWS / 10, rows.size());
    for (var row : rows) {
        long sumVal0 = (Long) row.get(0);
        long sumVal1 = (Long) row.get(1);
        assertEquals(sumVal0 * 2, sumVal1);
    }
}
/**
 * A correlated IN-subquery over the same table must return every row, regardless of
 * which aggregate rules are disabled (verifies collations survive aggregation).
 */
@ParameterizedTest
@MethodSource("provideRules")
public void correctCollationsOnAgg(String[] rules) {
    String query = appendDisabledRules(
            "SELECT PK FROM TEST_ONE_COL_IDX WHERE col0 IN (SELECT col0 FROM TEST_ONE_COL_IDX)", rules);
    assertEquals(ROWS, sql(query).size());
}
/**
 * COUNT over a non-numeric column in many shapes: bare, COUNT(*), COUNT(1),
 * CASE arguments, FILTER clauses, grouping by column/alias/ordinal/expression,
 * plus AVG and subquery combinations. Data comes from the shared "person" table.
 */
@ParameterizedTest
@MethodSource("provideRules")
public void countOfNonNumericField(String[] rules) {
    // COUNT(col) skips NULLs; COUNT(*) and COUNT(1) count all rows.
    assertQuery("select count(name) from person").disableRules(rules).returns(4L).check();
    assertQuery("select count(*) from person").disableRules(rules).returns(5L).check();
    assertQuery("select count(1) from person").disableRules(rules).returns(5L).check();
    assertQuery("select count(null) from person").disableRules(rules).returns(0L).check();
    assertQuery("select count(*) from person where salary < 0").disableRules(rules).returns(0L).check();
    assertQuery("select count(*) from person where salary < 0 and salary > 0").disableRules(rules).returns(0L).check();
    assertQuery("select count(case when name like 'R%' then 1 else null end) from person").disableRules(rules).returns(2L).check();
    assertQuery("select count(case when name not like 'I%' then 1 else null end) from person").disableRules(rules).returns(2L).check();
    assertQuery("select count(name) from person where salary > 10").disableRules(rules).returns(1L).check();
    assertQuery("select count(*) from person where salary > 10").disableRules(rules).returns(2L).check();
    assertQuery("select count(1) from person where salary > 10").disableRules(rules).returns(2L).check();
    assertQuery("select count(*) from person where name is not null").disableRules(rules).returns(4L).check();
    // FILTER clause variants of the same counts.
    assertQuery("select count(name) filter (where salary > 10) from person").disableRules(rules).returns(1L).check();
    assertQuery("select count(*) filter (where salary > 10) from person").disableRules(rules).returns(2L).check();
    assertQuery("select count(1) filter (where salary > 10) from person").disableRules(rules).returns(2L).check();
    assertQuery("select salary, count(name) from person group by salary order by salary")
            .disableRules(rules)
            .returns(10d, 3L)
            .returns(15d, 1L)
            .check();
    // same query, but grouping by alias
    assertQuery("select salary as sal, count(name) from person group by sal order by sal")
            .disableRules(rules)
            .returns(10d, 3L)
            .returns(15d, 1L)
            .check();
    // same query, but grouping by ordinal
    assertQuery("select salary, count(name) from person group by 1 order by 1")
            .disableRules(rules)
            .returns(10d, 3L)
            .returns(15d, 1L)
            .check();
    // Grouping by a computed expression, by its alias, and by its ordinal.
    assertQuery("select salary * salary / 5, count(name) from person group by (salary * salary / 5) order by (salary * salary / 5)")
            .disableRules(rules)
            .returns(20d, 3L)
            .returns(45d, 1L)
            .check();
    // same query, but grouping by alias
    assertQuery("select (salary * salary / 5) as sal, count(name) from person group by sal order by sal")
            .disableRules(rules)
            .returns(20d, 3L)
            .returns(45d, 1L)
            .check();
    // same query, but grouping by ordinal
    assertQuery("select salary * salary / 5, count(name) from person group by 1 order by 1")
            .disableRules(rules)
            .returns(20d, 3L)
            .returns(45d, 1L)
            .check();
    assertQuery("select salary, count(*) from person group by salary order by salary")
            .disableRules(rules)
            .returns(10d, 3L)
            .returns(15d, 2L)
            .check();
    assertQuery("select salary, count(1) from person group by salary order by salary")
            .disableRules(rules)
            .returns(10d, 3L)
            .returns(15d, 2L)
            .check();
    assertQuery("select salary, count(1), sum(1) from person group by salary order by salary")
            .disableRules(rules)
            .returns(10d, 3L, 3L)
            .returns(15d, 2L, 2L)
            .check();
    assertQuery("select salary, name, count(1), sum(salary) from person group by salary, name order by salary")
            .disableRules(rules)
            .returns(10d, "Igor", 1L, 10d)
            .returns(10d, "Roma", 2L, 20d)
            .returns(15d, "Ilya", 1L, 15d)
            .returns(15d, null, 1L, 15d)
            .check();
    // HAVING filters out every group -> empty result.
    assertQuery("select salary, count(name) from person group by salary having salary < 10 order by salary")
            .disableRules(rules)
            .check();
    assertQuery("select count(name), name from person group by name")
            .disableRules(rules)
            .returns(1L, "Igor")
            .returns(1L, "Ilya")
            .returns(2L, "Roma")
            .returns(0L, null)
            .check();
    assertQuery("select avg(salary) from person")
            .disableRules(rules)
            .returns(12.0)
            .check();
    // Aggregates inside subqueries.
    assertQuery("select name, salary from person where person.salary > (select avg(person.salary) from person)")
            .disableRules(rules)
            .returns(null, 15d)
            .returns("Ilya", 15d)
            .check();
    assertQuery("select avg(salary) from (select avg(salary) as salary from person union all select salary from person)")
            .disableRules(rules)
            .returns(12d)
            .check();
}
/**
 * A scalar subquery that yields more than one row must fail at runtime; a
 * correlated subquery guaranteed to yield exactly one row must succeed.
 */
@ParameterizedTest
@MethodSource("provideRules")
public void testMultipleRowsFromSingleAggr(String[] rules) {
    // Skip when only sort-based aggregates remain on the correlated path.
    Assumptions.assumeTrue(
            Arrays.stream(rules).noneMatch(rule -> rule.contains("ColocatedHash"))
                    || Arrays.stream(rules).noneMatch(rule -> rule.contains("MapReduceHash")),
            "Sorted aggregates are currently disabled on correlated path because "
                    + "they may cause deadlock"
    );
    assertThrows(
            IgniteException.class,
            () -> assertQuery("SELECT (SELECT name FROM person)").disableRules(rules).check()
    );
    assertThrows(
            IgniteException.class,
            () -> assertQuery("SELECT t.id, (SELECT x FROM TABLE(system_range(1, 5))) FROM person t").disableRules(rules).check()
    );
    assertThrows(
            IgniteException.class,
            () -> assertQuery("SELECT t.id, (SELECT x FROM "
                    + "TABLE(system_range(t.id, t.id + 1))) FROM person t").disableRules(rules).check()
    );
    // system_range(t.id, t.id) yields exactly one row per outer row, so this is legal.
    assertQuery("SELECT t.id, (SELECT x FROM TABLE(system_range(t.id, t.id))) FROM person t").disableRules(rules).check();
}
/**
 * ANY_VALUE must return some value from the aggregated set: one of the stored
 * names for the whole table, and a name from the matching group when grouping.
 */
@ParameterizedTest
@MethodSource("provideRules")
public void testAnyValAggr(String[] rules) {
    var ungrouped = sql(appendDisabledRules("select any_value(name) from person", rules));
    assertEquals(1, ungrouped.size());
    Object anyName = ungrouped.get(0).get(0);
    assertTrue(Arrays.asList("Igor", "Roma", "Ilya").contains(anyName), "Unexpected value: " + anyName);
    // Test with grouping.
    var grouped = sql(appendDisabledRules("select any_value(name), salary from person group by salary order by salary", rules));
    assertEquals(2, grouped.size());
    Object lowSalaryName = grouped.get(0).get(0);
    assertTrue(Arrays.asList("Igor", "Roma").contains(lowSalaryName), "Unexpected value: " + lowSalaryName);
    assertEquals("Ilya", grouped.get(1).get(0));
}
/**
 * Aggregation grouped by the colocation key must be planned as a colocated
 * aggregate under the exchange (no map/reduce split), both standalone and
 * under a join.
 */
@Test
public void testColocatedAggregate() {
    String sql = "SELECT val1, count(val2) FROM t1_colo_val1 GROUP BY val1";
    assertQuery(sql)
            .matches(QueryChecker.matches(".*Exchange.*Colocated.*Aggregate.*"))
            .returns("val0", 50L)
            .returns("val1", 50L)
            .check();
    sql = "SELECT t2_colo_va1.val1, agg.cnt "
            + "FROM t2_colo_va1 JOIN (SELECT val1, COUNT(val2) AS cnt FROM t1_colo_val1 GROUP BY val1) "
            + "AS agg ON t2_colo_va1.val1 = agg.val1";
    assertQuery(sql)
            // Pin the join implementation so the plan shape stays deterministic.
            .disableRules("HashJoinConverter", "MergeJoinConverter")
            .matches(QueryChecker.matches(".*Exchange.*Join.*Colocated.*Aggregate.*"))
            .returns("val0", 50L)
            .returns("val1", 50L)
            .check();
}
/**
 * Same queries as {@code testColocatedAggregate()}, but run for every aggregate
 * rule permutation and checking results only (no plan-shape assertions).
 */
@ParameterizedTest
@MethodSource("provideRules")
public void testColocatedAggregate(String[] rules) {
    String sql = "SELECT val1, count(val2) FROM t1_colo_val1 GROUP BY val1";
    assertQuery(sql)
            .disableRules(rules)
            .returns("val0", 50L)
            .returns("val1", 50L)
            .check();
    sql = "SELECT t2_colo_va1.val1, agg.cnt "
            + "FROM t2_colo_va1 JOIN (SELECT val1, COUNT(val2) AS cnt FROM t1_colo_val1 GROUP BY val1) "
            + "AS agg ON t2_colo_va1.val1 = agg.val1";
    assertQuery(sql)
            .disableRules(rules)
            .returns("val0", 50L)
            .returns("val1", 50L)
            .check();
}
/**
 * EVERY/SOME aggregates over a column pair that includes NULLs: NULL comparisons
 * are ignored, so EVERY(a &lt;= b) holds while EVERY(a &lt; b) does not.
 */
@Test
public void testEverySomeAggregate() {
    // Rebuild the table contents deterministically for this test.
    sql("DELETE FROM test_a_b_s");
    sql("INSERT INTO test_a_b_s(id, a, b) VALUES (1, null, 0)");
    sql("INSERT INTO test_a_b_s(id, a, b) VALUES (2, 0, null)");
    sql("INSERT INTO test_a_b_s(id, a, b) VALUES (3, null, null)");
    sql("INSERT INTO test_a_b_s(id, a, b) VALUES (4, 0, 1)");
    sql("INSERT INTO test_a_b_s(id, a, b) VALUES (5, 1, 1)");
    sql("INSERT INTO test_a_b_s(id, a, b) VALUES (6, 1, 2)");
    sql("INSERT INTO test_a_b_s(id, a, b) VALUES (7, 2, 2)");
    assertQuery("SELECT EVERY(a < b) FROM test_a_b_s").returns(false).check();
    assertQuery("SELECT SOME(a < b) FROM test_a_b_s").returns(true).check();
    assertQuery("SELECT EVERY(a <= b) FROM test_a_b_s").returns(true).check();
    assertQuery("SELECT SOME(a > b) FROM test_a_b_s").returns(false).check();
}
/**
 * SELECT DISTINCT without any aggregate function is planned as a map/reduce
 * aggregate; verifies the plan shape for the default, HASH, and SORT strategies,
 * with identical results in all three.
 */
@Test
public void distinctAggregateWithoutAggregateFunction() {
    var sql = "select distinct name from person";
    assertQuery(sql)
            .matches(QueryChecker.matches(".*ReduceHashAggregate.*Exchange.*MapHashAggregate.*"))
            .returns("Igor")
            .returns("Ilya")
            .returns("Roma")
            .returns(null)
            .check();
    assertQuery(sql, AggregateType.HASH)
            .matches(QueryChecker.matches(".*ReduceHashAggregate.*Exchange.*MapHashAggregate.*"))
            .returns("Igor")
            .returns("Ilya")
            .returns("Roma")
            .returns(null)
            .check();
    assertQuery(sql, AggregateType.SORT)
            .matches(QueryChecker.matches(".*ReduceSortAggregate.*Exchange.*MapSortAggregate.*"))
            .returns("Igor")
            .returns("Ilya")
            .returns("Roma")
            .returns(null)
            .check();
}
/**
 * Mixed-aggregate smoke test over test_a_b_s: DISTINCT, COUNT variants, AVG
 * (compared via a truncated VARCHAR cast), MIN, SUM, and DISTINCT-inside-aggregate
 * combinations, each run under every rule permutation.
 */
@ParameterizedTest
@MethodSource("provideRules")
@WithSystemProperty(key = "IMPLICIT_PK_ENABLED", value = "true")
public void testDifferentAgg(String[] rules) {
    assertQuery("SELECT DISTINCT(a) as a FROM test_a_b_s ORDER BY a")
            .disableRules(rules)
            .returns(10)
            .returns(11)
            .returns(12)
            .returns(13)
            .check();
    assertQuery("SELECT COUNT(*) FROM test_a_b_s")
            .disableRules(rules)
            .returns(7L)
            .check();
    assertQuery("SELECT COUNT(a), COUNT(DISTINCT(b)) FROM test_a_b_s")
            .disableRules(rules)
            .returns(7L, 5L)
            .check();
    assertQuery("SELECT COUNT(a) as a, s FROM test_a_b_s GROUP BY s ORDER BY a, s")
            .disableRules(rules)
            .returns(1L, "ahello")
            .returns(1L, null)
            .returns(2L, "hello")
            .returns(3L, "world")
            .check();
    // AVG is compared through SUBSTRING(..::VARCHAR, 1, 6) to sidestep scale differences.
    assertQuery("SELECT COUNT(a) as a, SUBSTRING(AVG(a)::VARCHAR, 1, 6) as b, MIN(a), MIN(b), s FROM test_a_b_s "
            + "GROUP BY s ORDER BY a, b")
            .disableRules(rules)
            .returns(1L, "10.000", 10, 5, "ahello")
            .returns(1L, "11.000", 11, 3, null)
            .returns(2L, "11.000", 11, 1, "hello")
            .returns(3L, "12.333", 12, 2, "world")
            .check();
    // Grouping by (s, b) splits the previous groups further.
    assertQuery("SELECT COUNT(a) as a, SUBSTRING(AVG(a)::VARCHAR, 1, 6) as bb, MIN(a), MIN(b), s FROM test_a_b_s "
            + "GROUP BY s, b ORDER BY a, s")
            .disableRules(rules)
            .returns(1L, "10.000", 10, 5, "ahello")
            .returns(1L, "11.000", 11, 1, "hello")
            .returns(1L, "11.000", 11, 3, "hello")
            .returns(1L, "13.000", 13, 6, "world")
            .returns(1L, "11.000", 11, 3, null)
            .returns(2L, "12.000", 12, 2, "world")
            .check();
    assertQuery("SELECT COUNT(a) FROM test_a_b_s")
            .disableRules(rules)
            .returns(7L)
            .check();
    assertQuery("SELECT COUNT(DISTINCT(a)) FROM test_a_b_s")
            .disableRules(rules)
            .returns(4L)
            .check();
    assertQuery("SELECT COUNT(a), COUNT(s), COUNT(*) FROM test_a_b_s")
            .disableRules(rules)
            .returns(7L, 6L, 7L)
            .check();
    assertQuery("SELECT SUBSTRING(AVG(a)::VARCHAR, 1, 6) FROM test_a_b_s")
            .disableRules(rules)
            .returns("11.428")
            .check();
    assertQuery("SELECT MIN(a) FROM test_a_b_s")
            .disableRules(rules)
            .returns(10)
            .check();
    assertQuery("SELECT COUNT(a), COUNT(DISTINCT(a)) FROM test_a_b_s")
            .disableRules(rules)
            .returns(7L, 4L)
            .check();
    assertQuery("SELECT COUNT(a), COUNT(DISTINCT a), SUM(a), SUM(DISTINCT a) FROM test_a_b_s")
            .disableRules(rules)
            .returns(7L, 4L, 80L, 46L)
            .check();
}
/**
 * A grouped aggregate over an empty table must return no rows (unlike an
 * ungrouped aggregate, which would return a single NULL row).
 */
@ParameterizedTest
@MethodSource("provideRules")
public void checkEmptyTable(String[] rules) {
    sql("DELETE FROM test_a_b_s");
    assertQuery("SELECT min(b) FROM test_a_b_s GROUP BY a")
            .disableRules(rules)
            .returnNothing().check();
}
/**
 * GROUPING SETS with four groupings — (str_col, int_col), (str_col), (int_col)
 * and the empty set — combined with a HAVING filter; every grouping contributes
 * its own rows to the result.
 */
@ParameterizedTest
@MethodSource("rulesForGroupingSets")
public void testGroupingSets(String[] rules) {
    sql("DELETE FROM test_str_int_real_dec");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (1, 's1', 10)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (2, 's1', 20)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (3, 's2', 10)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (4, 's3', 40)");
    assertQuery("SELECT str_col, SUM(int_col), COUNT(str_col) FROM test_str_int_real_dec GROUP BY GROUPING SETS "
            + "( (str_col, int_col), (str_col), (int_col), () ) HAVING SUM(int_col) > 0")
            .disableRules(rules)
            // empty group
            .returns(null, 80L, 4L)
            // group (str_col, int_col)
            .returns("s1", 10L, 1L)
            .returns("s1", 20L, 1L)
            .returns("s2", 10L, 1L)
            .returns("s3", 40L, 1L)
            // group (str_col)
            .returns("s1", 30L, 2L)
            .returns("s2", 10L, 1L)
            .returns("s3", 40L, 1L)
            // group (int_col): str_col is NULL because it is not in the grouping
            .returns(null, 40L, 1L)
            .returns(null, 20L, 2L)
            .returns(null, 20L, 1L)
            .check();
}
/**
 * Verifies the {@code GROUPING} function: its bitmask for single, duplicated and
 * combined grouping sets, and the 63-argument limit.
 */
@ParameterizedTest
@MethodSource("rulesForGroupingSets")
public void testGroupingFunction(String[] rules) {
    // Reset the shared table (the original issued this DELETE twice; once is enough).
    sql("DELETE FROM test_str_int_real_dec");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (1, 's1', 10)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (2, 's1', 20)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (3, 's2', 10)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (4, 's3', 40)");
    assertQuery("SELECT GROUPING(str_col), str_col FROM test_str_int_real_dec GROUP BY GROUPING SETS ((str_col))")
            .disableRules(rules)
            .returns(1L, "s1")
            .returns(1L, "s2")
            .returns(1L, "s3")
            .check();
    // A duplicated grouping set emits each group twice.
    assertQuery("SELECT GROUPING(str_col), str_col FROM test_str_int_real_dec GROUP BY GROUPING SETS ((str_col), (str_col))")
            .disableRules(rules)
            .returns(1L, "s1")
            .returns(1L, "s1")
            .returns(1L, "s2")
            .returns(1L, "s2")
            .returns(1L, "s3")
            .returns(1L, "s3")
            .check();
    // GROUPING(int_col, str_col) is a 2-bit mask: bit set when the column is grouped.
    assertQuery("SELECT GROUPING(int_col, str_col), GROUPING(int_col),"
            + "str_col, SUM(int_col), COUNT(str_col) FROM test_str_int_real_dec GROUP BY GROUPING SETS "
            + "( (str_col, int_col), (str_col), (int_col), () ) HAVING SUM(int_col) > 0")
            .disableRules(rules)
            // group (str_col, int_col)
            .returns(3L, 1L, "s1", 10L, 1L)
            .returns(3L, 1L, "s1", 20L, 1L)
            .returns(3L, 1L, "s2", 10L, 1L)
            .returns(3L, 1L, "s3", 40L, 1L)
            // group (str_col)
            .returns(1L, 0L, "s1", 30L, 2L)
            .returns(1L, 0L, "s2", 10L, 1L)
            .returns(1L, 0L, "s3", 40L, 1L)
            // group (int_col)
            .returns(2L, 1L, null, 20L, 2L)
            .returns(2L, 1L, null, 20L, 1L)
            .returns(2L, 1L, null, 40L, 1L)
            // empty group
            .returns(0L, 0L, null, 80L, 4L)
            .check();
    // GROUPING accepts at most 63 arguments; 64 must be rejected.
    String invalidQuery = IgniteStringFormatter.format(
            "SELECT GROUPING({}), str_col FROM test_str_int_real_dec GROUP BY GROUPING SETS ((str_col))",
            IntStream.rangeClosed(1, 64).mapToObj(i -> "str_col").collect(Collectors.joining(",")));
    assertThrows(SqlException.class, () -> sql(invalidQuery),
            "Invalid number of arguments to function ''GROUPING''. Was expecting number of agruments in range [1, 63]");
}
/**
 * Duplicated grouping sets — both (str_col) and () listed twice — must produce
 * every group's row twice, including the empty-group NULL row.
 */
@ParameterizedTest
@MethodSource("rulesForGroupingSets")
public void testDuplicateGroupingSets(String[] rules) {
    sql("DELETE FROM test_str_int_real_dec");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (1, 's1', 10)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (2, 's1', 20)");
    sql("INSERT INTO test_str_int_real_dec(id, str_col, int_col) VALUES (3, 's2', 10)");
    assertQuery("SELECT str_col FROM test_str_int_real_dec GROUP BY GROUPING SETS ((str_col), (), (str_col), ()) ORDER BY str_col")
            .disableRules(rules)
            .returns("s1")
            .returns("s2")
            .returns(null)
            .returns("s1")
            .returns("s2")
            .returns(null)
            .check();
}
/**
 * AVG over every numeric column type: exact results use DECIMAL with scale 16
 * (scale 18 for the DECIMAL(20,18) column), approximate types return DOUBLE.
 * Also covers FILTER clauses and NULL handling.
 */
@ParameterizedTest
@MethodSource("provideRules")
public void testAvg(String[] rules) {
    sql("DELETE FROM numbers");
    sql("INSERT INTO numbers VALUES (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1), (2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2)");
    BigDecimal avgDec = ONE_AND_HALF_WITH_SCALE_16;
    // AVG of DECIMAL(20,18) keeps the column scale of 18 rather than the default 16.
    BigDecimal avgDecBigScale = new BigDecimal("1.5").setScale(18, RoundingMode.UNNECESSARY);
    Double avgDouble = 1.5d;
    assertQuery("SELECT "
            + "AVG(tinyint_col), AVG(smallint_col), AVG(int_col), AVG(bigint_col), "
            + "AVG(float_col), AVG(double_col), AVG(dec2_col), AVG(dec4_2_col), AVG(dec20_18_col) "
            + "FROM numbers")
            .disableRules(rules)
            .returns(avgDec, avgDec, avgDec, avgDec, avgDouble, avgDouble, avgDec, avgDec, avgDecBigScale)
            .check();
    assertQuery("SELECT AVG(int_col) FILTER (WHERE smallint_col % 2 = 0)"
            + " , AVG(int_col) FILTER (WHERE smallint_col % 2 = 1)"
            + " FROM numbers")
            .disableRules(rules)
            // Use the shared scale-16 constants instead of ad-hoc unrounded setScale() copies.
            .returns(TWO_WITH_SCALE_16, ONE_WITH_SCALE_16)
            .check();
    sql("DELETE FROM numbers");
    sql("INSERT INTO numbers (id, dec4_2_col) VALUES (1, 1), (2, 2)");
    assertQuery("SELECT AVG(dec4_2_col) FROM numbers")
            .disableRules(rules)
            .returns(avgDec)
            .check();
    sql("DELETE FROM numbers");
    // 2.3333 is rounded to 2.33 on insert into DECIMAL(4,2): (1 + 2.33) / 2 = 1.665.
    sql("INSERT INTO numbers (id, dec4_2_col) VALUES (1, 1), (2, 2.3333)");
    assertQuery("SELECT AVG(dec4_2_col) FROM numbers")
            .disableRules(rules)
            .returns(new BigDecimal("1.665").setScale(16, RoundingMode.UNNECESSARY))
            .check();
    sql("DELETE FROM numbers");
    // AVG over only-NULL input is NULL.
    sql("INSERT INTO numbers (id, int_col, dec4_2_col) VALUES (1, null, null)");
    assertQuery("SELECT AVG(int_col), AVG(dec4_2_col) FROM numbers")
            .disableRules(rules)
            .returns(null, null)
            .check();
    sql("DELETE FROM numbers");
    // NULL rows are skipped, so the average of {1, NULL} is 1.
    sql("INSERT INTO numbers (id, int_col, dec4_2_col) VALUES (1, 1, 1), (2, null, null)");
    assertQuery("SELECT AVG(int_col), AVG(dec4_2_col) FROM numbers")
            .disableRules(rules)
            .returns(ONE_WITH_SCALE_16, ONE_WITH_SCALE_16)
            .check();
}
/**
 * AVG over randomly generated integers must match a BigDecimal reference computed
 * with the SQL type system's rounding mode, for every aggregate rule permutation.
 */
@Test
public void testAvgRandom() {
    long seed = System.nanoTime();
    Random random = new Random(seed);
    sql("DELETE FROM numbers");
    List<BigDecimal> numbers = new ArrayList<>();
    // Log the seed so a failing run can be reproduced.
    log.info("Seed: {}", seed);
    // Loop-invariant statement, prepared once.
    String query = "INSERT INTO numbers (id, int_col, dec10_2_col) VALUES(?, ?, ?)";
    for (int i = 1; i < 20; i++) {
        int val = random.nextInt(100) + 1;
        BigDecimal num = BigDecimal.valueOf(val);
        numbers.add(num);
        // num is integer-valued by construction, so insert val directly instead of
        // the redundant num.setScale(0, HALF_UP).intValue() round-trip.
        sql(query, i, val, num);
    }
    // Reference average at scale 16, using the engine's rounding mode.
    BigDecimal avg = numbers.stream()
            .reduce(new BigDecimal("0.00"), BigDecimal::add)
            .divide(BigDecimal.valueOf(numbers.size()), 16, IgniteTypeSystem.INSTANCE.roundingMode());
    for (String[] rules : makePermutations(PERMUTATION_RULES)) {
        assertQuery("SELECT AVG(int_col), AVG(dec10_2_col) FROM numbers")
                .disableRules(rules)
                .returns(avg, avg)
                .check();
    }
}
/**
 * AVG over NOT NULL and nullable columns must produce the same scale-16 DECIMAL
 * results, both ungrouped and grouped.
 */
@ParameterizedTest
@MethodSource("provideRules")
public void testAvgNullNotNull(String[] rules) {
    sql("DELETE FROM not_null_numbers");
    sql("INSERT INTO not_null_numbers (id, int_col, dec4_2_col) VALUES (1, 1, 1), (2, 2, 2)");
    assertQuery("SELECT AVG(int_col), AVG(dec4_2_col) FROM not_null_numbers")
            .disableRules(rules)
            .returns(ONE_AND_HALF_WITH_SCALE_16, ONE_AND_HALF_WITH_SCALE_16)
            .check();
    // Return type of an AVG aggregate can never be null.
    assertQuery("SELECT AVG(int_col) FROM not_null_numbers GROUP BY int_col")
            .disableRules(rules)
            .returns(ONE_WITH_SCALE_16)
            .returns(TWO_WITH_SCALE_16)
            .check();
    assertQuery("SELECT AVG(dec4_2_col) FROM not_null_numbers GROUP BY dec4_2_col")
            .disableRules(rules)
            .returns(ONE_WITH_SCALE_16)
            .returns(TWO_WITH_SCALE_16)
            .check();
    // Same grouped queries against the nullable table produce identical results.
    sql("DELETE FROM numbers");
    sql("INSERT INTO numbers (id, int_col, dec4_2_col) VALUES (1, 1, 1), (2, 2, 2)");
    assertQuery("SELECT AVG(int_col) FROM numbers GROUP BY int_col")
            .disableRules(rules)
            .returns(ONE_WITH_SCALE_16)
            .returns(TWO_WITH_SCALE_16)
            .check();
    assertQuery("SELECT AVG(dec4_2_col) FROM numbers GROUP BY dec4_2_col")
            .disableRules(rules)
            .returns(ONE_WITH_SCALE_16)
            .returns(TWO_WITH_SCALE_16)
            .check();
}
@ParameterizedTest
@MethodSource("provideRules")
public void testAvgOnEmptyGroup(String[] rules) {
sql("DELETE FROM numbers");
assertQuery("SELECT "
+ "AVG(tinyint_col), AVG(smallint_col), AVG(int_col), AVG(bigint_col), "
+ "AVG(float_col), AVG(double_col), AVG(dec2_col), AVG(dec4_2_col), AVG(dec20_18_col) "
+ "FROM numbers")
.disableRules(rules)
.returns(null, null, null, null, null, null, null, null, null)
.check();
}
@ParameterizedTest
@MethodSource("provideRules")
public void testAvgFromLiterals(String[] rules) {
BigDecimal avgDec = ONE_AND_HALF_WITH_SCALE_16;
BigDecimal avgDecBigScale = new BigDecimal("1.5").setScale(18, RoundingMode.UNNECESSARY);
Double avgDouble = 1.5d;
assertQuery("SELECT "
+ "AVG(tinyint_col), AVG(smallint_col), AVG(int_col), AVG(bigint_col), AVG(float_col), "
+ "AVG(double_col), AVG(dec2_col), AVG(dec4_2_col), AVG(dec20_18_col) "
+ "FROM (VALUES "
+ "(1::TINYINT, 1::SMALLINT, 1::INTEGER, 1::BIGINT, 1::REAL,"
+ " 1::DOUBLE, 1::DECIMAL(2), 1.00::DECIMAL(4,2), 1.00::DECIMAL(20,18)), "
+ "(2::TINYINT, 2::SMALLINT, 2::INTEGER, 2::BIGINT, 2::REAL,"
+ " 2::DOUBLE, 2::DECIMAL(2), 2.00::DECIMAL(4,2), 2.00::DECIMAL(20,18)) "
+ ") "
+ "t(tinyint_col, smallint_col, int_col, bigint_col, float_col, double_col, dec2_col, dec4_2_col, dec20_18_col)")
.disableRules(rules)
.returns(avgDec, avgDec, avgDec, avgDec, avgDouble, avgDouble, avgDec, avgDec, avgDecBigScale)
.check();
assertQuery("SELECT "
+ "AVG(1::TINYINT), AVG(2::SMALLINT), AVG(3::INTEGER), AVG(4::BIGINT), AVG(5::REAL), "
+ "AVG(6::DOUBLE), AVG(7::DECIMAL(2)), AVG(8.00::DECIMAL(4,2)), AVG(9.00::DECIMAL(20,18))")
.disableRules(rules)
.returns(
ONE_WITH_SCALE_16,
TWO_WITH_SCALE_16,
new BigDecimal("3").setScale(16, RoundingMode.UNNECESSARY),
new BigDecimal("4").setScale(16, RoundingMode.UNNECESSARY),
5.0d,
6.0d,
new BigDecimal("7").setScale(16, RoundingMode.UNNECESSARY),
new BigDecimal("8").setScale(16, RoundingMode.UNNECESSARY),
new BigDecimal("9").setScale(18, RoundingMode.UNNECESSARY)
)
.check();
assertQuery("SELECT AVG(dec2_col), AVG(dec4_2_col) FROM\n"
+ "(SELECT \n"
+ " 1::DECIMAL(2) as dec2_col, 2.00::DECIMAL(4, 2) as dec4_2_col\n"
+ " UNION\n"
+ " SELECT 2::DECIMAL(2) as dec2_col, 3.00::DECIMAL(4,2) as dec4_2_col\n"
+ ") as t")
.returns(ONE_AND_HALF_WITH_SCALE_16, new BigDecimal("2.5").setScale(16, RoundingMode.UNNECESSARY))
.check();
}
    /**
     * COUNT vs COUNT(DISTINCT) over the same data: plain COUNT sees all three rows while
     * DISTINCT collapses the duplicated {@code b} value (2, 2, 3 -> two distinct values).
     */
    @ParameterizedTest
    @MethodSource("provideRules")
    public void testAggDistinctGroupSet(String[] rules) {
        sql("DELETE FROM test_a_b_s");
        sql("INSERT INTO test_a_b_s (id, a, b) VALUES (1, 11, 2), (2, 12, 2), (3, 12, 3)");
        assertQuery("SELECT COUNT(a), COUNT(DISTINCT(b)) FROM test_a_b_s")
                .disableRules(rules)
                .returns(3L, 2L)
                .check();
    }
private static Stream<Arguments> rulesForGroupingSets() {
List<Object[]> rules = Arrays.asList(
// Use map/reduce aggregates for grouping sets
new String[]{"ColocatedHashAggregateConverterRule", "ColocatedSortAggregateConverterRule"},
// Use colocated aggregates grouping sets
new String[]{"MapReduceHashAggregateConverterRule", "MapReduceSortAggregateConverterRule"}
);
return rules.stream().map(Object.class::cast).map(Arguments::of);
}
static String[][] makePermutations(String[] rules) {
String[][] out = new String[rules.length][rules.length - 1];
for (int i = 0; i < rules.length; ++i) {
int pos = 0;
for (int ruleIdx = 0; ruleIdx < rules.length; ++ruleIdx) {
if (ruleIdx == i) {
continue;
}
out[i][pos++] = rules[ruleIdx];
}
}
return out;
}
private static Stream<Arguments> provideRules() {
return Arrays.stream(makePermutations(PERMUTATION_RULES)).map(Object.class::cast).map(Arguments::of);
}
    /**
     * Splices a DISABLE_RULE hint into a SELECT statement immediately after the first
     * SELECT keyword.
     *
     * <p>The whole statement is lower-cased first so the keyword search is case-insensitive;
     * note this also lower-cases any string literals inside the query (acceptable for tests).
     *
     * @param sql Query text; must contain the SELECT keyword.
     * @param rules Planner rule names to disable via the hint.
     * @return Query text with the hint inserted.
     */
    private String appendDisabledRules(String sql, String[] rules) {
        sql = sql.toLowerCase(Locale.ENGLISH);
        int pos = sql.indexOf("select");
        assert pos >= 0;
        // NOTE(review): the "+ 1" keeps one character after "select" (presumably the space)
        // ahead of the hint; a query ending exactly at "select" would throw
        // StringIndexOutOfBoundsException — confirm callers always pass a longer statement.
        String newSql = sql.substring(0, pos + "select".length() + 1);
        newSql += HintUtils.toHint(IgniteHint.DISABLE_RULE, rules);
        newSql += sql.substring(pos + "select".length() + 1);
        return newSql;
    }
}
|
googleapis/google-cloud-java | 36,941 | java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/BatchUpdateGenerativeQuestionConfigsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/generative_question_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* Aggregated response for UpdateGenerativeQuestionConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse}
*/
public final class BatchUpdateGenerativeQuestionConfigsResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse)
BatchUpdateGenerativeQuestionConfigsResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use BatchUpdateGenerativeQuestionConfigsResponse.newBuilder() to construct.
  // Delegates to the generated superclass constructor, which takes over the builder's state.
  private BatchUpdateGenerativeQuestionConfigsResponse(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; the repeated field starts empty.
  private BatchUpdateGenerativeQuestionConfigsResponse() {
    generativeQuestionConfigs_ = java.util.Collections.emptyList();
  }
  // Allocation hook invoked reflectively by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BatchUpdateGenerativeQuestionConfigsResponse();
  }
  // Returns the protobuf type descriptor for this message.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2.GenerativeQuestionServiceProto
        .internal_static_google_cloud_retail_v2_BatchUpdateGenerativeQuestionConfigsResponse_descriptor;
  }
  // Binds the generated field accessors to the message and builder classes.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2.GenerativeQuestionServiceProto
        .internal_static_google_cloud_retail_v2_BatchUpdateGenerativeQuestionConfigsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse.class,
            com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse.Builder.class);
  }
public static final int GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig>
generativeQuestionConfigs_;
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
  @java.lang.Override
  public java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig>
      getGenerativeQuestionConfigsList() {
    // Backing list for field 1; fixed once the message is built.
    return generativeQuestionConfigs_;
  }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder>
      getGenerativeQuestionConfigsOrBuilderList() {
    // Same backing list, exposed through the read-only OrBuilder view.
    return generativeQuestionConfigs_;
  }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
  @java.lang.Override
  public int getGenerativeQuestionConfigsCount() {
    // Number of configs carried in field 1.
    return generativeQuestionConfigs_.size();
  }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
  @java.lang.Override
  public com.google.cloud.retail.v2.GenerativeQuestionConfig getGenerativeQuestionConfigs(
      int index) {
    // Positional access; List.get throws IndexOutOfBoundsException for bad indexes.
    return generativeQuestionConfigs_.get(index);
  }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
  @java.lang.Override
  public com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder
      getGenerativeQuestionConfigsOrBuilder(int index) {
    // Positional access through the read-only OrBuilder view.
    return generativeQuestionConfigs_.get(index);
  }
private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // Memoized result: -1 = not yet computed, 0 = false, 1 = true.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has nothing to validate, so the computed answer is always true.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes each config under field number 1, then any preserved unknown fields.
    for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
      output.writeMessage(1, generativeQuestionConfigs_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Cached after first computation; -1 marks "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, generativeQuestionConfigs_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse other =
        (com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse) obj;
    // Equal iff both the config lists and the unknown field sets match.
    if (!getGenerativeQuestionConfigsList().equals(other.getGenerativeQuestionConfigsList()))
      return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Cached result; 0 marks "not computed yet".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Seeded from the type descriptor, then folds in present fields and unknown fields.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getGenerativeQuestionConfigsCount() > 0) {
      hash = (37 * hash) + GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getGenerativeQuestionConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Parses a message from a ByteBuffer.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses a message from a ByteBuffer, resolving extensions via the given registry.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a message from a ByteString.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses a message from a ByteString, resolving extensions via the given registry.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a message from a byte array.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  // Parses a message from a byte array, resolving extensions via the given registry.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // Parses a message by reading the stream to its end.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses a message from a stream, resolving extensions via the given registry.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses one length-delimited message from the stream (size varint first).
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  // Parses one length-delimited message, resolving extensions via the given registry.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Parses a message from an already-open CodedInputStream.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  // Parses from a CodedInputStream, resolving extensions via the given registry.
  public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    // Fresh builder with default (empty) state.
    return newBuilder();
  }
  // Creates an empty builder, obtained from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the given prototype's contents.
  public static Builder newBuilder(
      com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message's state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    // Builder attached to a parent so nested-builder invalidation propagates.
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Aggregated response for UpdateGenerativeQuestionConfig method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse)
com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponseOrBuilder {
    // Same type descriptor as the enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2_BatchUpdateGenerativeQuestionConfigsResponse_descriptor;
    }
    // Binds the generated field accessors to the message and builder classes.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2_BatchUpdateGenerativeQuestionConfigsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse.class,
              com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse.newBuilder()
    // No eager initialization needed; the repeated field defaults to an empty list.
    private Builder() {}
    // Parent-attached constructor used by nested-builder plumbing.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      // Resets the builder to default state: empty list, presence bits cleared.
      super.clear();
      bitField0_ = 0;
      if (generativeQuestionConfigsBuilder_ == null) {
        generativeQuestionConfigs_ = java.util.Collections.emptyList();
      } else {
        // A nested builder exists; clear it and drop the plain-list view.
        generativeQuestionConfigs_ = null;
        generativeQuestionConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      // Descriptor of the message this builder produces.
      return com.google.cloud.retail.v2.GenerativeQuestionServiceProto
          .internal_static_google_cloud_retail_v2_BatchUpdateGenerativeQuestionConfigsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
        getDefaultInstanceForType() {
      // Shared immutable default instance of the message type.
      return com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse build() {
      // Like buildPartial(), but rejects an uninitialized result.
      com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse buildPartial() {
      // Copies repeated fields first, then any singular fields guarded by presence bits.
      com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse result =
          new com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse result) {
      // Transfers the repeated field into the result. Without a nested builder the mutable
      // list is frozen (wrapped unmodifiable) and handed over; with one, the builder emits
      // the final list itself.
      if (generativeQuestionConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          generativeQuestionConfigs_ =
              java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.generativeQuestionConfigs_ = generativeQuestionConfigs_;
      } else {
        result.generativeQuestionConfigs_ = generativeQuestionConfigsBuilder_.build();
      }
    }
    private void buildPartial0(
        com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse result) {
      // Generated no-op for this message: no singular fields to copy, so the
      // bit-field snapshot is unused.
      int from_bitField0_ = bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      // Deep copy provided by the generated superclass.
      return super.clone();
    }
    // Reflective field setter; delegates to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    // Reflective field clear; delegates to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    // Reflective oneof clear; delegates to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    // Reflective repeated-field element setter; delegates to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    // Reflective repeated-field append; delegates to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to reflective merge.
      if (other
          instanceof com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse) {
        return mergeFrom(
            (com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse other) {
      // Appends the other message's configs and unknown fields into this builder.
      if (other
          == com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
              .getDefaultInstance()) return this;
      if (generativeQuestionConfigsBuilder_ == null) {
        // Plain-list mode: adopt the other list directly when ours is empty,
        // otherwise copy-on-write and append.
        if (!other.generativeQuestionConfigs_.isEmpty()) {
          if (generativeQuestionConfigs_.isEmpty()) {
            generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureGenerativeQuestionConfigsIsMutable();
            generativeQuestionConfigs_.addAll(other.generativeQuestionConfigs_);
          }
          onChanged();
        }
      } else {
        // Nested-builder mode: if our builder is empty, adopt the other list and
        // recreate the field builder lazily; otherwise append through the builder.
        if (!other.generativeQuestionConfigs_.isEmpty()) {
          if (generativeQuestionConfigsBuilder_.isEmpty()) {
            generativeQuestionConfigsBuilder_.dispose();
            generativeQuestionConfigsBuilder_ = null;
            generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            generativeQuestionConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getGenerativeQuestionConfigsFieldBuilder()
                    : null;
          } else {
            generativeQuestionConfigsBuilder_.addAllMessages(other.generativeQuestionConfigs_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Nothing to validate for this message type.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      // Wire-format parse loop: reads tags until EOF (tag 0) or an end-group tag,
      // appending field-1 messages and preserving everything else as unknown fields.
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Tag 10 = field 1, wire type 2 (length-delimited message).
                com.google.cloud.retail.v2.GenerativeQuestionConfig m =
                    input.readMessage(
                        com.google.cloud.retail.v2.GenerativeQuestionConfig.parser(),
                        extensionRegistry);
                if (generativeQuestionConfigsBuilder_ == null) {
                  ensureGenerativeQuestionConfigsIsMutable();
                  generativeQuestionConfigs_.add(m);
                } else {
                  generativeQuestionConfigsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig>
generativeQuestionConfigs_ = java.util.Collections.emptyList();
    private void ensureGenerativeQuestionConfigsIsMutable() {
      // Copy-on-write: bit 0x1 records that the list is a private mutable copy;
      // the first mutation replaces any shared/immutable list with an ArrayList copy.
      if (!((bitField0_ & 0x00000001) != 0)) {
        generativeQuestionConfigs_ =
            new java.util.ArrayList<com.google.cloud.retail.v2.GenerativeQuestionConfig>(
                generativeQuestionConfigs_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2.GenerativeQuestionConfig,
com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder,
com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder>
generativeQuestionConfigsBuilder_;
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig>
        getGenerativeQuestionConfigsList() {
      // Unmodifiable view of the plain list, or the nested builder's message list.
      if (generativeQuestionConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
      } else {
        return generativeQuestionConfigsBuilder_.getMessageList();
      }
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public int getGenerativeQuestionConfigsCount() {
      // Size of the plain list or of the nested builder's contents.
      if (generativeQuestionConfigsBuilder_ == null) {
        return generativeQuestionConfigs_.size();
      } else {
        return generativeQuestionConfigsBuilder_.getCount();
      }
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public com.google.cloud.retail.v2.GenerativeQuestionConfig getGenerativeQuestionConfigs(
        int index) {
      // Positional read from the plain list or the nested builder.
      if (generativeQuestionConfigsBuilder_ == null) {
        return generativeQuestionConfigs_.get(index);
      } else {
        return generativeQuestionConfigsBuilder_.getMessage(index);
      }
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder setGenerativeQuestionConfigs(
        int index, com.google.cloud.retail.v2.GenerativeQuestionConfig value) {
      // Replaces the element at index; null values are rejected.
      if (generativeQuestionConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.set(index, value);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.setMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder setGenerativeQuestionConfigs(
        int index, com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder builderForValue) {
      // Replaces the element at index with the builder's built message.
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.set(index, builderForValue.build());
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder addGenerativeQuestionConfigs(
        com.google.cloud.retail.v2.GenerativeQuestionConfig value) {
      // Appends a config; null values are rejected.
      if (generativeQuestionConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(value);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(value);
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder addGenerativeQuestionConfigs(
        int index, com.google.cloud.retail.v2.GenerativeQuestionConfig value) {
      // Inserts a config at the given position; null values are rejected.
      if (generativeQuestionConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(index, value);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder addGenerativeQuestionConfigs(
        com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder builderForValue) {
      // Appends the builder's built message.
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(builderForValue.build());
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder addGenerativeQuestionConfigs(
        int index, com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder builderForValue) {
      // Inserts the builder's built message at the given position.
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.add(index, builderForValue.build());
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder addAllGenerativeQuestionConfigs(
        java.lang.Iterable<? extends com.google.cloud.retail.v2.GenerativeQuestionConfig> values) {
      // Bulk append of all given configs.
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, generativeQuestionConfigs_);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder clearGenerativeQuestionConfigs() {
      // Empties the field and clears its copy-on-write bit.
      if (generativeQuestionConfigsBuilder_ == null) {
        generativeQuestionConfigs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.clear();
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder removeGenerativeQuestionConfigs(int index) {
      // Removes the element at the given position.
      if (generativeQuestionConfigsBuilder_ == null) {
        ensureGenerativeQuestionConfigsIsMutable();
        generativeQuestionConfigs_.remove(index);
        onChanged();
      } else {
        generativeQuestionConfigsBuilder_.remove(index);
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    // Mutable sub-builder for the element at index; forces nested-builder mode.
    public com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder
        getGenerativeQuestionConfigsBuilder(int index) {
      return getGenerativeQuestionConfigsFieldBuilder().getBuilder(index);
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder
        getGenerativeQuestionConfigsOrBuilder(int index) {
      // Read-only view of the element at index.
      if (generativeQuestionConfigsBuilder_ == null) {
        return generativeQuestionConfigs_.get(index);
      } else {
        return generativeQuestionConfigsBuilder_.getMessageOrBuilder(index);
      }
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public java.util.List<? extends com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder>
        getGenerativeQuestionConfigsOrBuilderList() {
      // Read-only view of all elements.
      if (generativeQuestionConfigsBuilder_ != null) {
        return generativeQuestionConfigsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
      }
    }
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder
addGenerativeQuestionConfigsBuilder() {
return getGenerativeQuestionConfigsFieldBuilder()
.addBuilder(com.google.cloud.retail.v2.GenerativeQuestionConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder
addGenerativeQuestionConfigsBuilder(int index) {
return getGenerativeQuestionConfigsFieldBuilder()
.addBuilder(
index, com.google.cloud.retail.v2.GenerativeQuestionConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* Optional. The updates question configs.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2.GenerativeQuestionConfig generative_question_configs = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
public java.util.List<com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder>
getGenerativeQuestionConfigsBuilderList() {
return getGenerativeQuestionConfigsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for generative_question_configs. On first
// call, ownership of the current list is transferred to the field builder (seeded with
// the mutability bit 0x00000001) and the plain-list reference is nulled so all later
// access goes through the builder.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.retail.v2.GenerativeQuestionConfig,
        com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder,
        com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder>
    getGenerativeQuestionConfigsFieldBuilder() {
  if (generativeQuestionConfigsBuilder_ == null) {
    generativeQuestionConfigsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.retail.v2.GenerativeQuestionConfig,
            com.google.cloud.retail.v2.GenerativeQuestionConfig.Builder,
            com.google.cloud.retail.v2.GenerativeQuestionConfigOrBuilder>(
            generativeQuestionConfigs_,
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    generativeQuestionConfigs_ = null;
  }
  return generativeQuestionConfigsBuilder_;
}
// Final pass-through overrides: unknown-field handling is entirely inherited from
// GeneratedMessageV3.Builder; declared final here so subclasses of this generated
// Builder cannot alter it.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse)
// Singleton default instance, eagerly created in a static initializer; all empty
// messages of this type share it.
private static final com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE =
      new com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse();
}

public static com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser singleton. parsePartialFrom attaches builder.buildPartial() as the
// "unfinished message" on every failure path so callers can recover whatever fields
// were read before the error; plain IOExceptions are wrapped in
// InvalidProtocolBufferException to keep the declared throws clause.
private static final com.google.protobuf.Parser<BatchUpdateGenerativeQuestionConfigsResponse>
    PARSER =
        new com.google.protobuf.AbstractParser<BatchUpdateGenerativeQuestionConfigsResponse>() {
          @java.lang.Override
          public BatchUpdateGenerativeQuestionConfigsResponse parsePartialFrom(
              com.google.protobuf.CodedInputStream input,
              com.google.protobuf.ExtensionRegistryLite extensionRegistry)
              throws com.google.protobuf.InvalidProtocolBufferException {
            Builder builder = newBuilder();
            try {
              builder.mergeFrom(input, extensionRegistry);
            } catch (com.google.protobuf.InvalidProtocolBufferException e) {
              throw e.setUnfinishedMessage(builder.buildPartial());
            } catch (com.google.protobuf.UninitializedMessageException e) {
              // Convert to the checked protobuf exception type expected by callers.
              throw e.asInvalidProtocolBufferException()
                  .setUnfinishedMessage(builder.buildPartial());
            } catch (java.io.IOException e) {
              throw new com.google.protobuf.InvalidProtocolBufferException(e)
                  .setUnfinishedMessage(builder.buildPartial());
            }
            return builder.buildPartial();
          }
        };
// Static and instance accessors both expose the shared PARSER / DEFAULT_INSTANCE
// singletons; the instance forms satisfy the MessageLite contract.
public static com.google.protobuf.Parser<BatchUpdateGenerativeQuestionConfigsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<BatchUpdateGenerativeQuestionConfigsResponse>
    getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.retail.v2.BatchUpdateGenerativeQuestionConfigsResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datafusion/v1beta1/v1beta1.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datafusion.v1beta1;
/**
*
*
* <pre>
* List dns peering response.
* </pre>
*
* Protobuf type {@code google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse}
*/
public final class ListDnsPeeringsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse)
ListDnsPeeringsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDnsPeeringsResponse.newBuilder() to construct.
private ListDnsPeeringsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance: empty peering list, empty token.
private ListDnsPeeringsResponse() {
  dnsPeerings_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListDnsPeeringsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_ListDnsPeeringsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_ListDnsPeeringsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse.class,
com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse.Builder.class);
}
public static final int DNS_PEERINGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datafusion.v1beta1.DnsPeering> dnsPeerings_;
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.datafusion.v1beta1.DnsPeering> getDnsPeeringsList() {
return dnsPeerings_;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.datafusion.v1beta1.DnsPeeringOrBuilder>
getDnsPeeringsOrBuilderList() {
return dnsPeerings_;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
@java.lang.Override
public int getDnsPeeringsCount() {
return dnsPeerings_.size();
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datafusion.v1beta1.DnsPeering getDnsPeerings(int index) {
return dnsPeerings_.get(index);
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
@java.lang.Override
public com.google.cloud.datafusion.v1beta1.DnsPeeringOrBuilder getDnsPeeringsOrBuilder(
int index) {
return dnsPeerings_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Stored as Object so it can hold either a String or a lazily-decoded ByteString;
// volatile so the getter's caching write is visible across threads.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results or empty if there are no more
 * results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the ByteString once and cache the String form for later calls.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results or empty if there are no more
 * results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // Encode the String once and cache the ByteString form for later calls.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized initialization check: -1 = not computed, 1 = initialized, 0 = not.
// This message has no required fields, so the computed answer is always true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes field 1 (each dns_peerings message), then field 2 (next_page_token,
// skipped when empty per proto3 rules), then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < dnsPeerings_.size(); i++) {
    output.writeMessage(1, dnsPeerings_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes in memoizedSize, sentinel -1) the exact byte size that
// writeTo will produce, mirroring its field-by-field logic.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < dnsPeerings_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, dnsPeerings_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over both declared fields plus unknown fields; non-matching types
// fall back to Object identity via super.equals.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse other =
      (com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse) obj;

  if (!getDnsPeeringsList().equals(other.getDnsPeeringsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash is memoized (0 = not yet computed) and seeded with the message descriptor so
// different message types with equal field values hash differently; consistent with
// equals above.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getDnsPeeringsCount() > 0) {
    hash = (37 * hash) + DNS_PEERINGS_FIELD_NUMBER;
    hash = (53 * hash) + getDnsPeeringsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* List dns peering response.
* </pre>
*
* Protobuf type {@code google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse)
com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_ListDnsPeeringsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_ListDnsPeeringsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse.class,
com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse.Builder.class);
}
// Construct using com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (dnsPeeringsBuilder_ == null) {
dnsPeerings_ = java.util.Collections.emptyList();
} else {
dnsPeerings_ = null;
dnsPeeringsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datafusion.v1beta1.V1Beta1
.internal_static_google_cloud_datafusion_v1beta1_ListDnsPeeringsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse getDefaultInstanceForType() {
return com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse.getDefaultInstance();
}
// build() enforces the initialization contract; buildPartial() copies builder state
// into a new message without that check.
@java.lang.Override
public com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse build() {
  com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse buildPartial() {
  com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse result =
      new com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Transfers the repeated dns_peerings field: if still held as a plain list, freeze it
// (unmodifiable) and clear the mutability bit so this builder no longer owns it;
// otherwise let the field builder assemble the final list.
private void buildPartialRepeatedFields(
    com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse result) {
  if (dnsPeeringsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      dnsPeerings_ = java.util.Collections.unmodifiableList(dnsPeerings_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.dnsPeerings_ = dnsPeerings_;
  } else {
    result.dnsPeerings_ = dnsPeeringsBuilder_.build();
  }
}

// Copies singular fields whose has-bits are set (0x00000002 = next_page_token).
private void buildPartial0(com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse) {
return mergeFrom((com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another ListDnsPeeringsResponse into this builder: dns_peerings entries are
// appended (adopting the other message's immutable list wholesale when ours is empty),
// and a non-empty next_page_token replaces ours.
public Builder mergeFrom(com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse other) {
  if (other == com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse.getDefaultInstance())
    return this;
  if (dnsPeeringsBuilder_ == null) {
    if (!other.dnsPeerings_.isEmpty()) {
      if (dnsPeerings_.isEmpty()) {
        // Adopt the other message's list directly; mutability bit cleared because the
        // adopted list is the message's immutable one.
        dnsPeerings_ = other.dnsPeerings_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureDnsPeeringsIsMutable();
        dnsPeerings_.addAll(other.dnsPeerings_);
      }
      onChanged();
    }
  } else {
    if (!other.dnsPeerings_.isEmpty()) {
      if (dnsPeeringsBuilder_.isEmpty()) {
        // Drop the empty field builder and adopt the list; re-create the field builder
        // only when the runtime is configured to always use field builders.
        dnsPeeringsBuilder_.dispose();
        dnsPeeringsBuilder_ = null;
        dnsPeerings_ = other.dnsPeerings_;
        bitField0_ = (bitField0_ & ~0x00000001);
        dnsPeeringsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getDnsPeeringsFieldBuilder()
                : null;
      } else {
        dnsPeeringsBuilder_.addAllMessages(other.dnsPeerings_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge loop: tag 10 = field 1 (dns_peerings message), tag 18 = field 2
// (next_page_token string), tag 0 = end of stream; anything else is routed to
// unknown-field handling. onChanged() fires in finally so listeners are notified even
// when parsing throws.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            com.google.cloud.datafusion.v1beta1.DnsPeering m =
                input.readMessage(
                    com.google.cloud.datafusion.v1beta1.DnsPeering.parser(), extensionRegistry);
            if (dnsPeeringsBuilder_ == null) {
              ensureDnsPeeringsIsMutable();
              dnsPeerings_.add(m);
            } else {
              dnsPeeringsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
// Builder field state. bitField0_ bit 0x00000001 tracks whether dnsPeerings_ is a
// private mutable copy; bit 0x00000002 tracks presence of next_page_token.
private int bitField0_;

private java.util.List<com.google.cloud.datafusion.v1beta1.DnsPeering> dnsPeerings_ =
    java.util.Collections.emptyList();

// Copy-on-first-write: replace the (possibly shared/immutable) list with a private
// ArrayList before the first mutation and mark it mutable via the bit flag.
private void ensureDnsPeeringsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    dnsPeerings_ =
        new java.util.ArrayList<com.google.cloud.datafusion.v1beta1.DnsPeering>(dnsPeerings_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created nested builder for dns_peerings; null until first builder-style access.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.datafusion.v1beta1.DnsPeering,
        com.google.cloud.datafusion.v1beta1.DnsPeering.Builder,
        com.google.cloud.datafusion.v1beta1.DnsPeeringOrBuilder>
    dnsPeeringsBuilder_;
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public java.util.List<com.google.cloud.datafusion.v1beta1.DnsPeering> getDnsPeeringsList() {
if (dnsPeeringsBuilder_ == null) {
return java.util.Collections.unmodifiableList(dnsPeerings_);
} else {
return dnsPeeringsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public int getDnsPeeringsCount() {
if (dnsPeeringsBuilder_ == null) {
return dnsPeerings_.size();
} else {
return dnsPeeringsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public com.google.cloud.datafusion.v1beta1.DnsPeering getDnsPeerings(int index) {
if (dnsPeeringsBuilder_ == null) {
return dnsPeerings_.get(index);
} else {
return dnsPeeringsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder setDnsPeerings(int index, com.google.cloud.datafusion.v1beta1.DnsPeering value) {
if (dnsPeeringsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDnsPeeringsIsMutable();
dnsPeerings_.set(index, value);
onChanged();
} else {
dnsPeeringsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder setDnsPeerings(
int index, com.google.cloud.datafusion.v1beta1.DnsPeering.Builder builderForValue) {
if (dnsPeeringsBuilder_ == null) {
ensureDnsPeeringsIsMutable();
dnsPeerings_.set(index, builderForValue.build());
onChanged();
} else {
dnsPeeringsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder addDnsPeerings(com.google.cloud.datafusion.v1beta1.DnsPeering value) {
if (dnsPeeringsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDnsPeeringsIsMutable();
dnsPeerings_.add(value);
onChanged();
} else {
dnsPeeringsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder addDnsPeerings(int index, com.google.cloud.datafusion.v1beta1.DnsPeering value) {
if (dnsPeeringsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDnsPeeringsIsMutable();
dnsPeerings_.add(index, value);
onChanged();
} else {
dnsPeeringsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder addDnsPeerings(
com.google.cloud.datafusion.v1beta1.DnsPeering.Builder builderForValue) {
if (dnsPeeringsBuilder_ == null) {
ensureDnsPeeringsIsMutable();
dnsPeerings_.add(builderForValue.build());
onChanged();
} else {
dnsPeeringsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder addDnsPeerings(
int index, com.google.cloud.datafusion.v1beta1.DnsPeering.Builder builderForValue) {
if (dnsPeeringsBuilder_ == null) {
ensureDnsPeeringsIsMutable();
dnsPeerings_.add(index, builderForValue.build());
onChanged();
} else {
dnsPeeringsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder addAllDnsPeerings(
java.lang.Iterable<? extends com.google.cloud.datafusion.v1beta1.DnsPeering> values) {
if (dnsPeeringsBuilder_ == null) {
ensureDnsPeeringsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, dnsPeerings_);
onChanged();
} else {
dnsPeeringsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder clearDnsPeerings() {
if (dnsPeeringsBuilder_ == null) {
dnsPeerings_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
dnsPeeringsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public Builder removeDnsPeerings(int index) {
if (dnsPeeringsBuilder_ == null) {
ensureDnsPeeringsIsMutable();
dnsPeerings_.remove(index);
onChanged();
} else {
dnsPeeringsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public com.google.cloud.datafusion.v1beta1.DnsPeering.Builder getDnsPeeringsBuilder(int index) {
return getDnsPeeringsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public com.google.cloud.datafusion.v1beta1.DnsPeeringOrBuilder getDnsPeeringsOrBuilder(
int index) {
if (dnsPeeringsBuilder_ == null) {
return dnsPeerings_.get(index);
} else {
return dnsPeeringsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public java.util.List<? extends com.google.cloud.datafusion.v1beta1.DnsPeeringOrBuilder>
getDnsPeeringsOrBuilderList() {
if (dnsPeeringsBuilder_ != null) {
return dnsPeeringsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(dnsPeerings_);
}
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public com.google.cloud.datafusion.v1beta1.DnsPeering.Builder addDnsPeeringsBuilder() {
return getDnsPeeringsFieldBuilder()
.addBuilder(com.google.cloud.datafusion.v1beta1.DnsPeering.getDefaultInstance());
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public com.google.cloud.datafusion.v1beta1.DnsPeering.Builder addDnsPeeringsBuilder(int index) {
return getDnsPeeringsFieldBuilder()
.addBuilder(index, com.google.cloud.datafusion.v1beta1.DnsPeering.getDefaultInstance());
}
/**
*
*
* <pre>
* List of dns peering configs.
* </pre>
*
* <code>repeated .google.cloud.datafusion.v1beta1.DnsPeering dns_peerings = 1;</code>
*/
public java.util.List<com.google.cloud.datafusion.v1beta1.DnsPeering.Builder>
getDnsPeeringsBuilderList() {
return getDnsPeeringsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datafusion.v1beta1.DnsPeering,
com.google.cloud.datafusion.v1beta1.DnsPeering.Builder,
com.google.cloud.datafusion.v1beta1.DnsPeeringOrBuilder>
getDnsPeeringsFieldBuilder() {
if (dnsPeeringsBuilder_ == null) {
dnsPeeringsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datafusion.v1beta1.DnsPeering,
com.google.cloud.datafusion.v1beta1.DnsPeering.Builder,
com.google.cloud.datafusion.v1beta1.DnsPeeringOrBuilder>(
dnsPeerings_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
dnsPeerings_ = null;
}
return dnsPeeringsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results or empty if there are no more
* results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Unknown-field handling is delegated unchanged to the generated superclass;
// declared final so the protobuf runtime's reflective merge path stays intact.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse)
// Shared immutable default (empty) instance, created eagerly at class load.
private static final com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse();
}

public static com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire-format parser. Parse errors carry the partially-built message via
// setUnfinishedMessage() so callers can inspect whatever did decode.
private static final com.google.protobuf.Parser<ListDnsPeeringsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListDnsPeeringsResponse>() {
      @java.lang.Override
      public ListDnsPeeringsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Plain I/O failures are rewrapped as protocol errors, same contract.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ListDnsPeeringsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListDnsPeeringsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.datafusion.v1beta1.ListDnsPeeringsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// NOTE(review): removed chunk-splice residue (a stray "|" and a
// "repo | size | path |" metadata row) that was fused onto the start of the
// next file's license header, making the text non-compiling. Provenance of
// the class below: googleapis/google-cloud-java,
// java-translate/proto-google-cloud-translate-v3/src/main/java/com/google/cloud/translate/v3/ListGlossariesResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/translate/v3/translation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.translate.v3;
/**
*
*
* <pre>
* Response message for ListGlossaries.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListGlossariesResponse}
*/
public final class ListGlossariesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.translation.v3.ListGlossariesResponse)
ListGlossariesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListGlossariesResponse.newBuilder() to construct.
private ListGlossariesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListGlossariesResponse() {
glossaries_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListGlossariesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.translate.v3.TranslationServiceProto
.internal_static_google_cloud_translation_v3_ListGlossariesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.translate.v3.TranslationServiceProto
.internal_static_google_cloud_translation_v3_ListGlossariesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.translate.v3.ListGlossariesResponse.class,
com.google.cloud.translate.v3.ListGlossariesResponse.Builder.class);
}
public static final int GLOSSARIES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.translate.v3.Glossary> glossaries_;
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.translate.v3.Glossary> getGlossariesList() {
return glossaries_;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.translate.v3.GlossaryOrBuilder>
getGlossariesOrBuilderList() {
return glossaries_;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
@java.lang.Override
public int getGlossariesCount() {
return glossaries_.size();
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.translate.v3.Glossary getGlossaries(int index) {
return glossaries_.get(index);
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.translate.v3.GlossaryOrBuilder getGlossariesOrBuilder(int index) {
return glossaries_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve a page of results. Pass this value in the
* [ListGlossariesRequest.page_token] field in the subsequent call to
* `ListGlossaries` method to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token to retrieve a page of results. Pass this value in the
* [ListGlossariesRequest.page_token] field in the subsequent call to
* `ListGlossaries` method to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes to the wire format: repeated field 1 (glossaries), then field 2
// (next_page_token, omitted when empty per proto3 default rules), then any
// unknown fields retained from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < glossaries_.size(); i++) {
    output.writeMessage(1, glossaries_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in `memoizedSize`; -1 means "not yet computed") the
// exact byte size writeTo() would emit, mirroring its field-by-field logic.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < glossaries_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, glossaries_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-by-field equality over glossaries, next_page_token and unknown fields.
// Non-ListGlossariesResponse arguments fall through to super.equals().
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.translate.v3.ListGlossariesResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.translate.v3.ListGlossariesResponse other =
      (com.google.cloud.translate.v3.ListGlossariesResponse) obj;

  if (!getGlossariesList().equals(other.getGlossariesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Hash over the same fields equals() compares; the 19/37/53/29 multipliers are
// the standard protoc-generated constants. Result is cached in memoizedHashCode
// (0 is treated as "not yet computed").
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated field only contributes when non-empty, keeping parity with equals().
  if (getGlossariesCount() > 0) {
    hash = (37 * hash) + GLOSSARIES_FIELD_NUMBER;
    hash = (53 * hash) + getGlossariesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.translate.v3.ListGlossariesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.translate.v3.ListGlossariesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for ListGlossaries.
* </pre>
*
* Protobuf type {@code google.cloud.translation.v3.ListGlossariesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.translation.v3.ListGlossariesResponse)
com.google.cloud.translate.v3.ListGlossariesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.translate.v3.TranslationServiceProto
.internal_static_google_cloud_translation_v3_ListGlossariesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.translate.v3.TranslationServiceProto
.internal_static_google_cloud_translation_v3_ListGlossariesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.translate.v3.ListGlossariesResponse.class,
com.google.cloud.translate.v3.ListGlossariesResponse.Builder.class);
}
// Construct using com.google.cloud.translate.v3.ListGlossariesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (glossariesBuilder_ == null) {
glossaries_ = java.util.Collections.emptyList();
} else {
glossaries_ = null;
glossariesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.translate.v3.TranslationServiceProto
.internal_static_google_cloud_translation_v3_ListGlossariesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.translate.v3.ListGlossariesResponse getDefaultInstanceForType() {
return com.google.cloud.translate.v3.ListGlossariesResponse.getDefaultInstance();
}
// Builds the message, throwing if required fields are missing (this message
// has none, so the check always passes).
@java.lang.Override
public com.google.cloud.translate.v3.ListGlossariesResponse build() {
  com.google.cloud.translate.v3.ListGlossariesResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Builds without the initialization check; used by build() and by parsers to
// attach partial results to errors.
@java.lang.Override
public com.google.cloud.translate.v3.ListGlossariesResponse buildPartial() {
  com.google.cloud.translate.v3.ListGlossariesResponse result =
      new com.google.cloud.translate.v3.ListGlossariesResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Transfers the repeated glossaries field: when no field builder exists, the
// local list is frozen (wrapped unmodifiable, has-bit 0x1 cleared) and shared
// with the result; otherwise the field builder produces the final list.
private void buildPartialRepeatedFields(
    com.google.cloud.translate.v3.ListGlossariesResponse result) {
  if (glossariesBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      glossaries_ = java.util.Collections.unmodifiableList(glossaries_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.glossaries_ = glossaries_;
  } else {
    result.glossaries_ = glossariesBuilder_.build();
  }
}

// Copies singular fields whose has-bits are set (0x2 = next_page_token).
private void buildPartial0(com.google.cloud.translate.v3.ListGlossariesResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Type-dispatching merge: same-type messages use the fast typed path below;
// anything else goes through the reflective superclass merge.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.translate.v3.ListGlossariesResponse) {
    return mergeFrom((com.google.cloud.translate.v3.ListGlossariesResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Typed merge. Repeated glossaries are concatenated; an empty local list may
// instead adopt (share) the other message's immutable list to avoid a copy.
// A non-empty next_page_token in `other` overwrites the local value.
public Builder mergeFrom(com.google.cloud.translate.v3.ListGlossariesResponse other) {
  if (other == com.google.cloud.translate.v3.ListGlossariesResponse.getDefaultInstance())
    return this;
  if (glossariesBuilder_ == null) {
    if (!other.glossaries_.isEmpty()) {
      if (glossaries_.isEmpty()) {
        // Adopt the other message's list; clear the mutable-ownership bit.
        glossaries_ = other.glossaries_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureGlossariesIsMutable();
        glossaries_.addAll(other.glossaries_);
      }
      onChanged();
    }
  } else {
    if (!other.glossaries_.isEmpty()) {
      if (glossariesBuilder_.isEmpty()) {
        // Discard the empty builder and adopt the other list directly;
        // recreate the builder only if the runtime forces field builders.
        glossariesBuilder_.dispose();
        glossariesBuilder_ = null;
        glossaries_ = other.glossaries_;
        bitField0_ = (bitField0_ & ~0x00000001);
        glossariesBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getGlossariesFieldBuilder()
                : null;
      } else {
        glossariesBuilder_.addAllMessages(other.glossaries_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

// Builders of this message are always initialized (no required fields).
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Wire-format merge loop. Tag values are (field_number << 3) | wire_type:
// 10 = field 1 length-delimited (glossaries), 18 = field 2 length-delimited
// (next_page_token); tag 0 or an end-group marker terminates the loop.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            com.google.cloud.translate.v3.Glossary m =
                input.readMessage(
                    com.google.cloud.translate.v3.Glossary.parser(), extensionRegistry);
            if (glossariesBuilder_ == null) {
              ensureGlossariesIsMutable();
              glossaries_.add(m);
            } else {
              glossariesBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Unrecognized fields are preserved in the unknown-field set.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on error so partially-merged state is observed.
    onChanged();
  } // finally
  return this;
}
// Has-bits: 0x1 = the glossaries list is a privately-owned mutable copy,
// 0x2 = next_page_token has been explicitly set on this builder.
private int bitField0_;

private java.util.List<com.google.cloud.translate.v3.Glossary> glossaries_ =
    java.util.Collections.emptyList();

// Copy-on-write: before the first mutation, replaces the (possibly shared or
// immutable) list with a private ArrayList and records ownership in bit 0x1.
private void ensureGlossariesIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    glossaries_ = new java.util.ArrayList<com.google.cloud.translate.v3.Glossary>(glossaries_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created manager for the repeated glossaries field; null until a
// builder-returning accessor forces its creation.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.translate.v3.Glossary,
        com.google.cloud.translate.v3.Glossary.Builder,
        com.google.cloud.translate.v3.GlossaryOrBuilder>
    glossariesBuilder_;
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public java.util.List<com.google.cloud.translate.v3.Glossary> getGlossariesList() {
if (glossariesBuilder_ == null) {
return java.util.Collections.unmodifiableList(glossaries_);
} else {
return glossariesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public int getGlossariesCount() {
if (glossariesBuilder_ == null) {
return glossaries_.size();
} else {
return glossariesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public com.google.cloud.translate.v3.Glossary getGlossaries(int index) {
if (glossariesBuilder_ == null) {
return glossaries_.get(index);
} else {
return glossariesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder setGlossaries(int index, com.google.cloud.translate.v3.Glossary value) {
if (glossariesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGlossariesIsMutable();
glossaries_.set(index, value);
onChanged();
} else {
glossariesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder setGlossaries(
int index, com.google.cloud.translate.v3.Glossary.Builder builderForValue) {
if (glossariesBuilder_ == null) {
ensureGlossariesIsMutable();
glossaries_.set(index, builderForValue.build());
onChanged();
} else {
glossariesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder addGlossaries(com.google.cloud.translate.v3.Glossary value) {
if (glossariesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGlossariesIsMutable();
glossaries_.add(value);
onChanged();
} else {
glossariesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder addGlossaries(int index, com.google.cloud.translate.v3.Glossary value) {
if (glossariesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGlossariesIsMutable();
glossaries_.add(index, value);
onChanged();
} else {
glossariesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder addGlossaries(com.google.cloud.translate.v3.Glossary.Builder builderForValue) {
if (glossariesBuilder_ == null) {
ensureGlossariesIsMutable();
glossaries_.add(builderForValue.build());
onChanged();
} else {
glossariesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder addGlossaries(
int index, com.google.cloud.translate.v3.Glossary.Builder builderForValue) {
if (glossariesBuilder_ == null) {
ensureGlossariesIsMutable();
glossaries_.add(index, builderForValue.build());
onChanged();
} else {
glossariesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder addAllGlossaries(
java.lang.Iterable<? extends com.google.cloud.translate.v3.Glossary> values) {
if (glossariesBuilder_ == null) {
ensureGlossariesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, glossaries_);
onChanged();
} else {
glossariesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder clearGlossaries() {
if (glossariesBuilder_ == null) {
glossaries_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
glossariesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public Builder removeGlossaries(int index) {
if (glossariesBuilder_ == null) {
ensureGlossariesIsMutable();
glossaries_.remove(index);
onChanged();
} else {
glossariesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public com.google.cloud.translate.v3.Glossary.Builder getGlossariesBuilder(int index) {
return getGlossariesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public com.google.cloud.translate.v3.GlossaryOrBuilder getGlossariesOrBuilder(int index) {
if (glossariesBuilder_ == null) {
return glossaries_.get(index);
} else {
return glossariesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public java.util.List<? extends com.google.cloud.translate.v3.GlossaryOrBuilder>
getGlossariesOrBuilderList() {
if (glossariesBuilder_ != null) {
return glossariesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(glossaries_);
}
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public com.google.cloud.translate.v3.Glossary.Builder addGlossariesBuilder() {
return getGlossariesFieldBuilder()
.addBuilder(com.google.cloud.translate.v3.Glossary.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public com.google.cloud.translate.v3.Glossary.Builder addGlossariesBuilder(int index) {
return getGlossariesFieldBuilder()
.addBuilder(index, com.google.cloud.translate.v3.Glossary.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of glossaries for a project.
* </pre>
*
* <code>repeated .google.cloud.translation.v3.Glossary glossaries = 1;</code>
*/
public java.util.List<com.google.cloud.translate.v3.Glossary.Builder>
getGlossariesBuilderList() {
return getGlossariesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for `glossaries` (field 1). On
// first call it takes over the current list (mutable iff has-bit 0x1 is set)
// and nulls out `glossaries_` so all further access goes through the builder.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.translate.v3.Glossary,
        com.google.cloud.translate.v3.Glossary.Builder,
        com.google.cloud.translate.v3.GlossaryOrBuilder>
    getGlossariesFieldBuilder() {
  if (glossariesBuilder_ == null) {
    glossariesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.translate.v3.Glossary,
            com.google.cloud.translate.v3.Glossary.Builder,
            com.google.cloud.translate.v3.GlossaryOrBuilder>(
            glossaries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    // From here on the list is owned by the field builder.
    glossaries_ = null;
  }
  return glossariesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve a page of results. Pass this value in the
* [ListGlossariesRequest.page_token] field in the subsequent call to
* `ListGlossaries` method to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve a page of results. Pass this value in the
* [ListGlossariesRequest.page_token] field in the subsequent call to
* `ListGlossaries` method to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve a page of results. Pass this value in the
* [ListGlossariesRequest.page_token] field in the subsequent call to
* `ListGlossaries` method to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token to retrieve a page of results. Pass this value in the
* [ListGlossariesRequest.page_token] field in the subsequent call to
* `ListGlossaries` method to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
    /**
     *
     *
     * <pre>
     * A token to retrieve a page of results. Pass this value in the
     * [ListGlossariesRequest.page_token] field in the subsequent call to
     * `ListGlossaries` method to retrieve the next page of results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Validate the bytes are well-formed UTF-8 before caching the ByteString.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Plain delegation to the GeneratedMessageV3.Builder implementation.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    // Plain delegation to the GeneratedMessageV3.Builder implementation.
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.translation.v3.ListGlossariesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListGlossariesResponse)
  // Single shared instance representing the default (all-fields-default) message.
  private static final com.google.cloud.translate.v3.ListGlossariesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.translate.v3.ListGlossariesResponse();
  }

  public static com.google.cloud.translate.v3.ListGlossariesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. Every failure path attaches the partially-built message
  // to the thrown InvalidProtocolBufferException instead of discarding it.
  private static final com.google.protobuf.Parser<ListGlossariesResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListGlossariesResponse>() {
        @java.lang.Override
        public ListGlossariesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O errors in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListGlossariesResponse> parser() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; returns the shared parser.
  @java.lang.Override
  public com.google.protobuf.Parser<ListGlossariesResponse> getParserForType() {
    return PARSER;
  }
  // Instance accessor required by the Message interface; returns the shared default.
  @java.lang.Override
  public com.google.cloud.translate.v3.ListGlossariesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,800 | java-datalabeling/proto-google-cloud-datalabeling-v1beta1/src/main/java/com/google/cloud/datalabeling/v1beta1/ListDataItemsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datalabeling/v1beta1/data_labeling_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datalabeling.v1beta1;
/**
*
*
* <pre>
* Request message for ListDataItems.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListDataItemsRequest}
*/
public final class ListDataItemsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datalabeling.v1beta1.ListDataItemsRequest)
ListDataItemsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDataItemsRequest.newBuilder() to construct.
private ListDataItemsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDataItemsRequest() {
parent_ = "";
filter_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDataItemsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDataItemsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDataItemsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest.class,
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of the dataset to list data items, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Name of the dataset to list data items, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filter is not supported at this moment.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. Filter is not supported at this moment.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous
* [DataLabelingService.ListDataItems] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous
* [DataLabelingService.ListDataItems] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
}
if (pageSize_ != 0) {
output.writeInt32(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest)) {
return super.equals(obj);
}
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest other =
(com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getFilter().equals(other.getFilter())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + FILTER_FIELD_NUMBER;
hash = (53 * hash) + getFilter().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ListDataItems.
* </pre>
*
* Protobuf type {@code google.cloud.datalabeling.v1beta1.ListDataItemsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datalabeling.v1beta1.ListDataItemsRequest)
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDataItemsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDataItemsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest.class,
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest.Builder.class);
}
// Construct using com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
filter_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datalabeling.v1beta1.DataLabelingServiceOuterClass
.internal_static_google_cloud_datalabeling_v1beta1_ListDataItemsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest getDefaultInstanceForType() {
return com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest build() {
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest buildPartial() {
com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest result =
new com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest) {
return mergeFrom((com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest other) {
if (other == com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Name of the dataset to list data items, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the dataset to list data items, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Name of the dataset to list data items, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the dataset to list data items, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Name of the dataset to list data items, format:
* projects/{project_id}/datasets/{dataset_id}
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. Filter is not supported at this moment.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. Filter is not supported at this moment.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. Filter is not supported at this moment.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filter is not supported at this moment.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Filter is not supported at this moment.
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. Requested page size. Server may return fewer results than
* requested. Default value is 100.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous
* [DataLabelingService.ListDataItems] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous
* [DataLabelingService.ListDataItems] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous
* [DataLabelingService.ListDataItems] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous
* [DataLabelingService.ListDataItems] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A token identifying a page of results for the server to return.
* Typically obtained by
* [ListDataItemsResponse.next_page_token][google.cloud.datalabeling.v1beta1.ListDataItemsResponse.next_page_token] of the previous
* [DataLabelingService.ListDataItems] call.
* Return first page if empty.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
    // Unknown-field handling is delegated unchanged to the generated message superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.datalabeling.v1beta1.ListDataItemsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datalabeling.v1beta1.ListDataItemsRequest)
  // Singleton default instance shared by all callers; protobuf messages are immutable, so
  // sharing one default is safe.
  private static final com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest();
  }

  /** Returns the shared immutable default instance of this message type. */
  public static com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser for ListDataItemsRequest; parsing is delegated to the builder's mergeFrom.
  private static final com.google.protobuf.Parser<ListDataItemsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListDataItemsRequest>() {
        @java.lang.Override
        public ListDataItemsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect the partial message.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the singleton parser for this message type. */
  public static com.google.protobuf.Parser<ListDataItemsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListDataItemsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.datalabeling.v1beta1.ListDataItemsRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.cloudcontrolspartner.v1.stub;
import static com.google.cloud.cloudcontrolspartner.v1.CloudControlsPartnerCoreClient.ListAccessApprovalRequestsPagedResponse;
import static com.google.cloud.cloudcontrolspartner.v1.CloudControlsPartnerCoreClient.ListCustomersPagedResponse;
import static com.google.cloud.cloudcontrolspartner.v1.CloudControlsPartnerCoreClient.ListWorkloadsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.cloudcontrolspartner.v1.AccessApprovalRequest;
import com.google.cloud.cloudcontrolspartner.v1.CreateCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1.Customer;
import com.google.cloud.cloudcontrolspartner.v1.DeleteCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1.EkmConnections;
import com.google.cloud.cloudcontrolspartner.v1.GetCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1.GetEkmConnectionsRequest;
import com.google.cloud.cloudcontrolspartner.v1.GetPartnerPermissionsRequest;
import com.google.cloud.cloudcontrolspartner.v1.GetPartnerRequest;
import com.google.cloud.cloudcontrolspartner.v1.GetWorkloadRequest;
import com.google.cloud.cloudcontrolspartner.v1.ListAccessApprovalRequestsRequest;
import com.google.cloud.cloudcontrolspartner.v1.ListAccessApprovalRequestsResponse;
import com.google.cloud.cloudcontrolspartner.v1.ListCustomersRequest;
import com.google.cloud.cloudcontrolspartner.v1.ListCustomersResponse;
import com.google.cloud.cloudcontrolspartner.v1.ListWorkloadsRequest;
import com.google.cloud.cloudcontrolspartner.v1.ListWorkloadsResponse;
import com.google.cloud.cloudcontrolspartner.v1.Partner;
import com.google.cloud.cloudcontrolspartner.v1.PartnerPermissions;
import com.google.cloud.cloudcontrolspartner.v1.UpdateCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1.Workload;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link CloudControlsPartnerCoreStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (cloudcontrolspartner.googleapis.com) and default port (443)
* are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getWorkload:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CloudControlsPartnerCoreStubSettings.Builder cloudControlsPartnerCoreSettingsBuilder =
* CloudControlsPartnerCoreStubSettings.newBuilder();
* cloudControlsPartnerCoreSettingsBuilder
* .getWorkloadSettings()
* .setRetrySettings(
* cloudControlsPartnerCoreSettingsBuilder
* .getWorkloadSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* CloudControlsPartnerCoreStubSettings cloudControlsPartnerCoreSettings =
* cloudControlsPartnerCoreSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*/
@Generated("by gapic-generator-java")
public class CloudControlsPartnerCoreStubSettings
extends StubSettings<CloudControlsPartnerCoreStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

  // Frozen per-RPC call settings, built once from the Builder in the constructor and exposed
  // through the read-only accessors below.
  private final UnaryCallSettings<GetWorkloadRequest, Workload> getWorkloadSettings;
  private final PagedCallSettings<
          ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
      listWorkloadsSettings;
  private final UnaryCallSettings<GetCustomerRequest, Customer> getCustomerSettings;
  private final PagedCallSettings<
          ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
      listCustomersSettings;
  private final UnaryCallSettings<GetEkmConnectionsRequest, EkmConnections>
      getEkmConnectionsSettings;
  private final UnaryCallSettings<GetPartnerPermissionsRequest, PartnerPermissions>
      getPartnerPermissionsSettings;
  private final PagedCallSettings<
          ListAccessApprovalRequestsRequest,
          ListAccessApprovalRequestsResponse,
          ListAccessApprovalRequestsPagedResponse>
      listAccessApprovalRequestsSettings;
  private final UnaryCallSettings<GetPartnerRequest, Partner> getPartnerSettings;
  private final UnaryCallSettings<CreateCustomerRequest, Customer> createCustomerSettings;
  private final UnaryCallSettings<UpdateCustomerRequest, Customer> updateCustomerSettings;
  private final UnaryCallSettings<DeleteCustomerRequest, Empty> deleteCustomerSettings;
  // Pagination descriptors: tell gax how to inject the page token/size into each request and how
  // to extract the next-page token and resource list from each response, per paginated RPC.
  private static final PagedListDescriptor<ListWorkloadsRequest, ListWorkloadsResponse, Workload>
      LIST_WORKLOADS_PAGE_STR_DESC =
          new PagedListDescriptor<ListWorkloadsRequest, ListWorkloadsResponse, Workload>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListWorkloadsRequest injectToken(ListWorkloadsRequest payload, String token) {
              return ListWorkloadsRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListWorkloadsRequest injectPageSize(ListWorkloadsRequest payload, int pageSize) {
              return ListWorkloadsRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListWorkloadsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListWorkloadsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Workload> extractResources(ListWorkloadsResponse payload) {
              return payload.getWorkloadsList();
            }
          };

  private static final PagedListDescriptor<ListCustomersRequest, ListCustomersResponse, Customer>
      LIST_CUSTOMERS_PAGE_STR_DESC =
          new PagedListDescriptor<ListCustomersRequest, ListCustomersResponse, Customer>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListCustomersRequest injectToken(ListCustomersRequest payload, String token) {
              return ListCustomersRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListCustomersRequest injectPageSize(ListCustomersRequest payload, int pageSize) {
              return ListCustomersRequest.newBuilder(payload).setPageSize(pageSize).build();
            }

            @Override
            public Integer extractPageSize(ListCustomersRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListCustomersResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Customer> extractResources(ListCustomersResponse payload) {
              return payload.getCustomersList();
            }
          };

  private static final PagedListDescriptor<
          ListAccessApprovalRequestsRequest,
          ListAccessApprovalRequestsResponse,
          AccessApprovalRequest>
      LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListAccessApprovalRequestsRequest,
              ListAccessApprovalRequestsResponse,
              AccessApprovalRequest>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListAccessApprovalRequestsRequest injectToken(
                ListAccessApprovalRequestsRequest payload, String token) {
              return ListAccessApprovalRequestsRequest.newBuilder(payload)
                  .setPageToken(token)
                  .build();
            }

            @Override
            public ListAccessApprovalRequestsRequest injectPageSize(
                ListAccessApprovalRequestsRequest payload, int pageSize) {
              return ListAccessApprovalRequestsRequest.newBuilder(payload)
                  .setPageSize(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(ListAccessApprovalRequestsRequest payload) {
              return payload.getPageSize();
            }

            @Override
            public String extractNextToken(ListAccessApprovalRequestsResponse payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<AccessApprovalRequest> extractResources(
                ListAccessApprovalRequestsResponse payload) {
              return payload.getAccessApprovalRequestsList();
            }
          };
  // Paged-response factories: combine each descriptor above with the raw callable to turn a
  // single list response future into a lazily-paging response object.
  private static final PagedListResponseFactory<
          ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
      LIST_WORKLOADS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>() {
            @Override
            public ApiFuture<ListWorkloadsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListWorkloadsRequest, ListWorkloadsResponse> callable,
                ListWorkloadsRequest request,
                ApiCallContext context,
                ApiFuture<ListWorkloadsResponse> futureResponse) {
              PageContext<ListWorkloadsRequest, ListWorkloadsResponse, Workload> pageContext =
                  PageContext.create(callable, LIST_WORKLOADS_PAGE_STR_DESC, request, context);
              return ListWorkloadsPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
      LIST_CUSTOMERS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>() {
            @Override
            public ApiFuture<ListCustomersPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListCustomersRequest, ListCustomersResponse> callable,
                ListCustomersRequest request,
                ApiCallContext context,
                ApiFuture<ListCustomersResponse> futureResponse) {
              PageContext<ListCustomersRequest, ListCustomersResponse, Customer> pageContext =
                  PageContext.create(callable, LIST_CUSTOMERS_PAGE_STR_DESC, request, context);
              return ListCustomersPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  private static final PagedListResponseFactory<
          ListAccessApprovalRequestsRequest,
          ListAccessApprovalRequestsResponse,
          ListAccessApprovalRequestsPagedResponse>
      LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListAccessApprovalRequestsRequest,
              ListAccessApprovalRequestsResponse,
              ListAccessApprovalRequestsPagedResponse>() {
            @Override
            public ApiFuture<ListAccessApprovalRequestsPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListAccessApprovalRequestsRequest, ListAccessApprovalRequestsResponse>
                    callable,
                ListAccessApprovalRequestsRequest request,
                ApiCallContext context,
                ApiFuture<ListAccessApprovalRequestsResponse> futureResponse) {
              PageContext<
                      ListAccessApprovalRequestsRequest,
                      ListAccessApprovalRequestsResponse,
                      AccessApprovalRequest>
                  pageContext =
                      PageContext.create(
                          callable, LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_DESC, request, context);
              return ListAccessApprovalRequestsPagedResponse.createAsync(
                  pageContext, futureResponse);
            }
          };
  // Read-only accessors for the frozen per-RPC settings built in the constructor.
  /** Returns the object with the settings used for calls to getWorkload. */
  public UnaryCallSettings<GetWorkloadRequest, Workload> getWorkloadSettings() {
    return getWorkloadSettings;
  }

  /** Returns the object with the settings used for calls to listWorkloads. */
  public PagedCallSettings<ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
      listWorkloadsSettings() {
    return listWorkloadsSettings;
  }

  /** Returns the object with the settings used for calls to getCustomer. */
  public UnaryCallSettings<GetCustomerRequest, Customer> getCustomerSettings() {
    return getCustomerSettings;
  }

  /** Returns the object with the settings used for calls to listCustomers. */
  public PagedCallSettings<ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
      listCustomersSettings() {
    return listCustomersSettings;
  }

  /** Returns the object with the settings used for calls to getEkmConnections. */
  public UnaryCallSettings<GetEkmConnectionsRequest, EkmConnections> getEkmConnectionsSettings() {
    return getEkmConnectionsSettings;
  }

  /** Returns the object with the settings used for calls to getPartnerPermissions. */
  public UnaryCallSettings<GetPartnerPermissionsRequest, PartnerPermissions>
      getPartnerPermissionsSettings() {
    return getPartnerPermissionsSettings;
  }

  /**
   * Returns the object with the settings used for calls to listAccessApprovalRequests.
   *
   * @deprecated This method is deprecated and will be removed in the next major version update.
   */
  @Deprecated
  public PagedCallSettings<
          ListAccessApprovalRequestsRequest,
          ListAccessApprovalRequestsResponse,
          ListAccessApprovalRequestsPagedResponse>
      listAccessApprovalRequestsSettings() {
    return listAccessApprovalRequestsSettings;
  }

  /** Returns the object with the settings used for calls to getPartner. */
  public UnaryCallSettings<GetPartnerRequest, Partner> getPartnerSettings() {
    return getPartnerSettings;
  }

  /** Returns the object with the settings used for calls to createCustomer. */
  public UnaryCallSettings<CreateCustomerRequest, Customer> createCustomerSettings() {
    return createCustomerSettings;
  }

  /** Returns the object with the settings used for calls to updateCustomer. */
  public UnaryCallSettings<UpdateCustomerRequest, Customer> updateCustomerSettings() {
    return updateCustomerSettings;
  }

  /** Returns the object with the settings used for calls to deleteCustomer. */
  public UnaryCallSettings<DeleteCustomerRequest, Empty> deleteCustomerSettings() {
    return deleteCustomerSettings;
  }
public CloudControlsPartnerCoreStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(GrpcTransportChannel.getGrpcTransportName())) {
return GrpcCloudControlsPartnerCoreStub.create(this);
}
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonCloudControlsPartnerCoreStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "cloudcontrolspartner";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "cloudcontrolspartner.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "cloudcontrolspartner.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }

  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default transport channel provider; gRPC is the default transport. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }

  /** Returns a builder for the client headers (library/transport version tokens) sent over gRPC. */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(CloudControlsPartnerCoreStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }

  /** Returns a builder for the client headers sent over REST (HTTP/JSON). */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(CloudControlsPartnerCoreStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns the default client header provider builder (delegates to the gRPC variant). */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return CloudControlsPartnerCoreStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }

  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /**
   * Constructs an instance from the given builder, freezing every per-RPC call setting into its
   * immutable built form.
   */
  protected CloudControlsPartnerCoreStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);

    getWorkloadSettings = settingsBuilder.getWorkloadSettings().build();
    listWorkloadsSettings = settingsBuilder.listWorkloadsSettings().build();
    getCustomerSettings = settingsBuilder.getCustomerSettings().build();
    listCustomersSettings = settingsBuilder.listCustomersSettings().build();
    getEkmConnectionsSettings = settingsBuilder.getEkmConnectionsSettings().build();
    getPartnerPermissionsSettings = settingsBuilder.getPartnerPermissionsSettings().build();
    listAccessApprovalRequestsSettings =
        settingsBuilder.listAccessApprovalRequestsSettings().build();
    getPartnerSettings = settingsBuilder.getPartnerSettings().build();
    createCustomerSettings = settingsBuilder.createCustomerSettings().build();
    updateCustomerSettings = settingsBuilder.updateCustomerSettings().build();
    deleteCustomerSettings = settingsBuilder.deleteCustomerSettings().build();
  }
/** Builder for CloudControlsPartnerCoreStubSettings. */
public static class Builder
extends StubSettings.Builder<CloudControlsPartnerCoreStubSettings, Builder> {
    // Mutable per-RPC settings builders; unaryMethodSettingsBuilders aggregates them so that
    // applyToAllUnaryMethods can update every method in one pass.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final UnaryCallSettings.Builder<GetWorkloadRequest, Workload> getWorkloadSettings;
    private final PagedCallSettings.Builder<
            ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
        listWorkloadsSettings;
    private final UnaryCallSettings.Builder<GetCustomerRequest, Customer> getCustomerSettings;
    private final PagedCallSettings.Builder<
            ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
        listCustomersSettings;
    private final UnaryCallSettings.Builder<GetEkmConnectionsRequest, EkmConnections>
        getEkmConnectionsSettings;
    private final UnaryCallSettings.Builder<GetPartnerPermissionsRequest, PartnerPermissions>
        getPartnerPermissionsSettings;
    private final PagedCallSettings.Builder<
            ListAccessApprovalRequestsRequest,
            ListAccessApprovalRequestsResponse,
            ListAccessApprovalRequestsPagedResponse>
        listAccessApprovalRequestsSettings;
    private final UnaryCallSettings.Builder<GetPartnerRequest, Partner> getPartnerSettings;
    private final UnaryCallSettings.Builder<CreateCustomerRequest, Customer> createCustomerSettings;
    private final UnaryCallSettings.Builder<UpdateCustomerRequest, Customer> updateCustomerSettings;
    private final UnaryCallSettings.Builder<DeleteCustomerRequest, Empty> deleteCustomerSettings;
    // Named sets of retryable gRPC status codes, referenced by name from initDefaults():
    // "retry_policy_0_codes" retries only on UNAVAILABLE; "no_retry_codes" disables retries.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }
    // Named retry parameter sets, referenced by name from initDefaults().
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      // Exponential backoff: 1s initial delay, x1.3 growth, capped at 10s; each RPC attempt and
      // the overall deadline are both 60s.
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(10000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      // Single attempt, no backoff, for non-idempotent methods.
      settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
      definitions.put("no_retry_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    /** Creates a builder with no client context; all settings start at their defaults. */
    protected Builder() {
      this(((ClientContext) null));
    }
    /** Creates a builder whose defaults are taken from the given client context (may be null). */
    protected Builder(ClientContext clientContext) {
      super(clientContext);

      getWorkloadSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listWorkloadsSettings = PagedCallSettings.newBuilder(LIST_WORKLOADS_PAGE_STR_FACT);
      getCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listCustomersSettings = PagedCallSettings.newBuilder(LIST_CUSTOMERS_PAGE_STR_FACT);
      getEkmConnectionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getPartnerPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listAccessApprovalRequestsSettings =
          PagedCallSettings.newBuilder(LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_FACT);
      getPartnerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

      // Keep this list in sync with the fields above; applyToAllUnaryMethods iterates it.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getWorkloadSettings,
              listWorkloadsSettings,
              getCustomerSettings,
              listCustomersSettings,
              getEkmConnectionsSettings,
              getPartnerPermissionsSettings,
              listAccessApprovalRequestsSettings,
              getPartnerSettings,
              createCustomerSettings,
              updateCustomerSettings,
              deleteCustomerSettings);
      initDefaults(this);
    }
    /** Creates a builder seeded from an existing (already built) settings instance. */
    protected Builder(CloudControlsPartnerCoreStubSettings settings) {
      super(settings);

      getWorkloadSettings = settings.getWorkloadSettings.toBuilder();
      listWorkloadsSettings = settings.listWorkloadsSettings.toBuilder();
      getCustomerSettings = settings.getCustomerSettings.toBuilder();
      listCustomersSettings = settings.listCustomersSettings.toBuilder();
      getEkmConnectionsSettings = settings.getEkmConnectionsSettings.toBuilder();
      getPartnerPermissionsSettings = settings.getPartnerPermissionsSettings.toBuilder();
      listAccessApprovalRequestsSettings = settings.listAccessApprovalRequestsSettings.toBuilder();
      getPartnerSettings = settings.getPartnerSettings.toBuilder();
      createCustomerSettings = settings.createCustomerSettings.toBuilder();
      updateCustomerSettings = settings.updateCustomerSettings.toBuilder();
      deleteCustomerSettings = settings.deleteCustomerSettings.toBuilder();

      // Keep this list in sync with the fields above; applyToAllUnaryMethods iterates it.
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              getWorkloadSettings,
              listWorkloadsSettings,
              getCustomerSettings,
              listCustomersSettings,
              getEkmConnectionsSettings,
              getPartnerPermissionsSettings,
              listAccessApprovalRequestsSettings,
              getPartnerSettings,
              createCustomerSettings,
              updateCustomerSettings,
              deleteCustomerSettings);
    }
    /** Creates the default (gRPC-transport) builder with standard credentials, headers, and mTLS. */
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }

    /** Creates the default REST (HTTP/JSON-transport) builder; mirrors createDefault(). */
    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));

      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);

      return initDefaults(builder);
    }
    /**
     * Applies the service-configured defaults: read/list RPCs retry on UNAVAILABLE with backoff
     * ("retry_policy_0"); mutating RPCs (create/update/delete) are not retried ("no_retry").
     */
    private static Builder initDefaults(Builder builder) {
      builder
          .getWorkloadSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listWorkloadsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getCustomerSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listCustomersSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getEkmConnectionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getPartnerPermissionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .listAccessApprovalRequestsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getPartnerSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .createCustomerSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .updateCustomerSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      builder
          .deleteCustomerSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));

      return builder;
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the mutable builders for all unary methods, in declaration order. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }
/** Returns the builder for the settings used for calls to getWorkload. */
public UnaryCallSettings.Builder<GetWorkloadRequest, Workload> getWorkloadSettings() {
return getWorkloadSettings;
}
/**
 * Returns the builder for the settings used for calls to listWorkloads.
 *
 * @return the mutable paged call-settings builder for the listWorkloads RPC
 */
public PagedCallSettings.Builder<
        ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
    listWorkloadsSettings() {
  return listWorkloadsSettings;
}
/**
 * Returns the builder for the settings used for calls to getCustomer.
 *
 * @return the mutable call-settings builder for the getCustomer RPC
 */
public UnaryCallSettings.Builder<GetCustomerRequest, Customer> getCustomerSettings() {
  return getCustomerSettings;
}
/**
 * Returns the builder for the settings used for calls to listCustomers.
 *
 * @return the mutable paged call-settings builder for the listCustomers RPC
 */
public PagedCallSettings.Builder<
        ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
    listCustomersSettings() {
  return listCustomersSettings;
}
/**
 * Returns the builder for the settings used for calls to getEkmConnections.
 *
 * @return the mutable call-settings builder for the getEkmConnections RPC
 */
public UnaryCallSettings.Builder<GetEkmConnectionsRequest, EkmConnections>
    getEkmConnectionsSettings() {
  return getEkmConnectionsSettings;
}
/**
 * Returns the builder for the settings used for calls to getPartnerPermissions.
 *
 * @return the mutable call-settings builder for the getPartnerPermissions RPC
 */
public UnaryCallSettings.Builder<GetPartnerPermissionsRequest, PartnerPermissions>
    getPartnerPermissionsSettings() {
  return getPartnerPermissionsSettings;
}
/**
 * Returns the builder for the settings used for calls to listAccessApprovalRequests.
 *
 * @return the mutable paged call-settings builder for the listAccessApprovalRequests RPC
 * @deprecated This method is deprecated and will be removed in the next major version update.
 */
@Deprecated
public PagedCallSettings.Builder<
        ListAccessApprovalRequestsRequest,
        ListAccessApprovalRequestsResponse,
        ListAccessApprovalRequestsPagedResponse>
    listAccessApprovalRequestsSettings() {
  return listAccessApprovalRequestsSettings;
}
/**
 * Returns the builder for the settings used for calls to getPartner.
 *
 * @return the mutable call-settings builder for the getPartner RPC
 */
public UnaryCallSettings.Builder<GetPartnerRequest, Partner> getPartnerSettings() {
  return getPartnerSettings;
}
/**
 * Returns the builder for the settings used for calls to createCustomer.
 *
 * @return the mutable call-settings builder for the createCustomer RPC
 */
public UnaryCallSettings.Builder<CreateCustomerRequest, Customer> createCustomerSettings() {
  return createCustomerSettings;
}
/**
 * Returns the builder for the settings used for calls to updateCustomer.
 *
 * @return the mutable call-settings builder for the updateCustomer RPC
 */
public UnaryCallSettings.Builder<UpdateCustomerRequest, Customer> updateCustomerSettings() {
  return updateCustomerSettings;
}
/**
 * Returns the builder for the settings used for calls to deleteCustomer.
 *
 * @return the mutable call-settings builder for the deleteCustomer RPC
 */
public UnaryCallSettings.Builder<DeleteCustomerRequest, Empty> deleteCustomerSettings() {
  return deleteCustomerSettings;
}
@Override
public CloudControlsPartnerCoreStubSettings build() throws IOException {
  // Freezes the per-method retry/call settings configured on this builder into an
  // immutable stub-settings instance.
  return new CloudControlsPartnerCoreStubSettings(this);
}
}
}
|
googleapis/google-cloud-java | 36,805 | java-cloudsupport/proto-google-cloud-cloudsupport-v2beta/src/main/java/com/google/cloud/support/v2beta/SearchCasesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/support/v2beta/case_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.support.v2beta;
/**
 *
 *
 * <pre>
 * The response message for the SearchCases endpoint.
 * </pre>
 *
 * Protobuf type {@code google.cloud.support.v2beta.SearchCasesResponse}
 *
 * <p>NOTE: machine-generated by the protocol buffer compiler from
 * google/cloud/support/v2beta/case_service.proto — do not hand-edit; regenerate instead.
 * Instances are immutable; use {@link #newBuilder()} to construct or modify.
 */
public final class SearchCasesResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.support.v2beta.SearchCasesResponse)
    SearchCasesResponseOrBuilder {
  // Generated protobuf messages always declare serialVersionUID 0L.
  private static final long serialVersionUID = 0L;

  // Use SearchCasesResponse.newBuilder() to construct.
  private SearchCasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private SearchCasesResponse() {
    cases_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Called reflectively by the protobuf runtime; not for application use.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new SearchCasesResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.support.v2beta.CaseServiceProto
        .internal_static_google_cloud_support_v2beta_SearchCasesResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.support.v2beta.CaseServiceProto
        .internal_static_google_cloud_support_v2beta_SearchCasesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.support.v2beta.SearchCasesResponse.class,
            com.google.cloud.support.v2beta.SearchCasesResponse.Builder.class);
  }

  public static final int CASES_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.support.v2beta.Case> cases_;

  /**
   *
   *
   * <pre>
   * The list of cases associated with the parent after any
   * filters have been applied.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.support.v2beta.Case> getCasesList() {
    return cases_;
  }

  /**
   *
   *
   * <pre>
   * The list of cases associated with the parent after any
   * filters have been applied.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.support.v2beta.CaseOrBuilder>
      getCasesOrBuilderList() {
    return cases_;
  }

  /**
   *
   *
   * <pre>
   * The list of cases associated with the parent after any
   * filters have been applied.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
   */
  @java.lang.Override
  public int getCasesCount() {
    return cases_.size();
  }

  /**
   *
   *
   * <pre>
   * The list of cases associated with the parent after any
   * filters have been applied.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.support.v2beta.Case getCases(int index) {
    return cases_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The list of cases associated with the parent after any
   * filters have been applied.
   * </pre>
   *
   * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.support.v2beta.CaseOrBuilder getCasesOrBuilder(int index) {
    return cases_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Lazily converted between ByteString and String; volatile for safe publication.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A token to retrieve the next page of results. Set this in the
   * `page_token` field of subsequent `cases.search` requests. If unspecified,
   * there are no more results to retrieve.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later reads skip the UTF-8 conversion.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A token to retrieve the next page of results. Set this in the
   * `page_token` field of subsequent `cases.search` requests. If unspecified,
   * there are no more results to retrieve.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Cached isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes field 1 (cases), field 2 (next_page_token), then unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < cases_.size(); i++) {
      output.writeMessage(1, cases_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < cases_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, cases_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over cases, next_page_token, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.support.v2beta.SearchCasesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.support.v2beta.SearchCasesResponse other =
        (com.google.cloud.support.v2beta.SearchCasesResponse) obj;
    if (!getCasesList().equals(other.getCasesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash code consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getCasesCount() > 0) {
      hash = (37 * hash) + CASES_FIELD_NUMBER;
      hash = (53 * hash) + getCasesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points delegating to PARSER.
  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.support.v2beta.SearchCasesResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * The response message for the SearchCases endpoint.
   * </pre>
   *
   * Protobuf type {@code google.cloud.support.v2beta.SearchCasesResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.support.v2beta.SearchCasesResponse)
      com.google.cloud.support.v2beta.SearchCasesResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.support.v2beta.CaseServiceProto
          .internal_static_google_cloud_support_v2beta_SearchCasesResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.support.v2beta.CaseServiceProto
          .internal_static_google_cloud_support_v2beta_SearchCasesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.support.v2beta.SearchCasesResponse.class,
              com.google.cloud.support.v2beta.SearchCasesResponse.Builder.class);
    }

    // Construct using com.google.cloud.support.v2beta.SearchCasesResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (casesBuilder_ == null) {
        cases_ = java.util.Collections.emptyList();
      } else {
        cases_ = null;
        casesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.support.v2beta.CaseServiceProto
          .internal_static_google_cloud_support_v2beta_SearchCasesResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.support.v2beta.SearchCasesResponse getDefaultInstanceForType() {
      return com.google.cloud.support.v2beta.SearchCasesResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.support.v2beta.SearchCasesResponse build() {
      com.google.cloud.support.v2beta.SearchCasesResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.support.v2beta.SearchCasesResponse buildPartial() {
      com.google.cloud.support.v2beta.SearchCasesResponse result =
          new com.google.cloud.support.v2beta.SearchCasesResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.support.v2beta.SearchCasesResponse result) {
      if (casesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // Freeze the builder-owned list before handing it to the immutable message.
          cases_ = java.util.Collections.unmodifiableList(cases_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.cases_ = cases_;
      } else {
        result.cases_ = casesBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.support.v2beta.SearchCasesResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.support.v2beta.SearchCasesResponse) {
        return mergeFrom((com.google.cloud.support.v2beta.SearchCasesResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.support.v2beta.SearchCasesResponse other) {
      if (other == com.google.cloud.support.v2beta.SearchCasesResponse.getDefaultInstance())
        return this;
      if (casesBuilder_ == null) {
        if (!other.cases_.isEmpty()) {
          if (cases_.isEmpty()) {
            // Share the other message's immutable list instead of copying it.
            cases_ = other.cases_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureCasesIsMutable();
            cases_.addAll(other.cases_);
          }
          onChanged();
        }
      } else {
        if (!other.cases_.isEmpty()) {
          if (casesBuilder_.isEmpty()) {
            casesBuilder_.dispose();
            casesBuilder_ = null;
            cases_ = other.cases_;
            bitField0_ = (bitField0_ & ~0x00000001);
            casesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getCasesFieldBuilder()
                    : null;
          } else {
            casesBuilder_.addAllMessages(other.cases_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: reads tags until EOF (tag 0) or an end-group marker.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.support.v2beta.Case m =
                    input.readMessage(
                        com.google.cloud.support.v2beta.Case.parser(), extensionRegistry);
                if (casesBuilder_ == null) {
                  ensureCasesIsMutable();
                  cases_.add(m);
                } else {
                  casesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 tracks whether cases_ is builder-owned (mutable);
    // bit 0x00000002 tracks whether next_page_token has been set.
    private int bitField0_;

    private java.util.List<com.google.cloud.support.v2beta.Case> cases_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: switches cases_ to a builder-owned ArrayList on first mutation.
    private void ensureCasesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        cases_ = new java.util.ArrayList<com.google.cloud.support.v2beta.Case>(cases_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.support.v2beta.Case,
            com.google.cloud.support.v2beta.Case.Builder,
            com.google.cloud.support.v2beta.CaseOrBuilder>
        casesBuilder_;

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public java.util.List<com.google.cloud.support.v2beta.Case> getCasesList() {
      if (casesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(cases_);
      } else {
        return casesBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public int getCasesCount() {
      if (casesBuilder_ == null) {
        return cases_.size();
      } else {
        return casesBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public com.google.cloud.support.v2beta.Case getCases(int index) {
      if (casesBuilder_ == null) {
        return cases_.get(index);
      } else {
        return casesBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder setCases(int index, com.google.cloud.support.v2beta.Case value) {
      if (casesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCasesIsMutable();
        cases_.set(index, value);
        onChanged();
      } else {
        casesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder setCases(
        int index, com.google.cloud.support.v2beta.Case.Builder builderForValue) {
      if (casesBuilder_ == null) {
        ensureCasesIsMutable();
        cases_.set(index, builderForValue.build());
        onChanged();
      } else {
        casesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder addCases(com.google.cloud.support.v2beta.Case value) {
      if (casesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCasesIsMutable();
        cases_.add(value);
        onChanged();
      } else {
        casesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder addCases(int index, com.google.cloud.support.v2beta.Case value) {
      if (casesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureCasesIsMutable();
        cases_.add(index, value);
        onChanged();
      } else {
        casesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder addCases(com.google.cloud.support.v2beta.Case.Builder builderForValue) {
      if (casesBuilder_ == null) {
        ensureCasesIsMutable();
        cases_.add(builderForValue.build());
        onChanged();
      } else {
        casesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder addCases(
        int index, com.google.cloud.support.v2beta.Case.Builder builderForValue) {
      if (casesBuilder_ == null) {
        ensureCasesIsMutable();
        cases_.add(index, builderForValue.build());
        onChanged();
      } else {
        casesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder addAllCases(
        java.lang.Iterable<? extends com.google.cloud.support.v2beta.Case> values) {
      if (casesBuilder_ == null) {
        ensureCasesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, cases_);
        onChanged();
      } else {
        casesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder clearCases() {
      if (casesBuilder_ == null) {
        cases_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        casesBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public Builder removeCases(int index) {
      if (casesBuilder_ == null) {
        ensureCasesIsMutable();
        cases_.remove(index);
        onChanged();
      } else {
        casesBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public com.google.cloud.support.v2beta.Case.Builder getCasesBuilder(int index) {
      return getCasesFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public com.google.cloud.support.v2beta.CaseOrBuilder getCasesOrBuilder(int index) {
      if (casesBuilder_ == null) {
        return cases_.get(index);
      } else {
        return casesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.support.v2beta.CaseOrBuilder>
        getCasesOrBuilderList() {
      if (casesBuilder_ != null) {
        return casesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(cases_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public com.google.cloud.support.v2beta.Case.Builder addCasesBuilder() {
      return getCasesFieldBuilder()
          .addBuilder(com.google.cloud.support.v2beta.Case.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public com.google.cloud.support.v2beta.Case.Builder addCasesBuilder(int index) {
      return getCasesFieldBuilder()
          .addBuilder(index, com.google.cloud.support.v2beta.Case.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of cases associated with the parent after any
     * filters have been applied.
     * </pre>
     *
     * <code>repeated .google.cloud.support.v2beta.Case cases = 1;</code>
     */
    public java.util.List<com.google.cloud.support.v2beta.Case.Builder> getCasesBuilderList() {
      return getCasesFieldBuilder().getBuilderList();
    }

    // Lazily creates the repeated-field builder; once created, cases_ is owned by it.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.support.v2beta.Case,
            com.google.cloud.support.v2beta.Case.Builder,
            com.google.cloud.support.v2beta.CaseOrBuilder>
        getCasesFieldBuilder() {
      if (casesBuilder_ == null) {
        casesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.support.v2beta.Case,
                com.google.cloud.support.v2beta.Case.Builder,
                com.google.cloud.support.v2beta.CaseOrBuilder>(
                cases_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        cases_ = null;
      }
      return casesBuilder_;
    }

    private java.lang.Object nextPageToken_ = "";

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. Set this in the
     * `page_token` field of subsequent `cases.search` requests. If unspecified,
     * there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. Set this in the
     * `page_token` field of subsequent `cases.search` requests. If unspecified,
     * there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. Set this in the
     * `page_token` field of subsequent `cases.search` requests. If unspecified,
     * there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. Set this in the
     * `page_token` field of subsequent `cases.search` requests. If unspecified,
     * there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A token to retrieve the next page of results. Set this in the
     * `page_token` field of subsequent `cases.search` requests. If unspecified,
     * there are no more results to retrieve.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.support.v2beta.SearchCasesResponse)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.support.v2beta.SearchCasesResponse)
  private static final com.google.cloud.support.v2beta.SearchCasesResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.support.v2beta.SearchCasesResponse();
  }

  public static com.google.cloud.support.v2beta.SearchCasesResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared stateless parser instance used by all parseFrom overloads.
  private static final com.google.protobuf.Parser<SearchCasesResponse> PARSER =
      new com.google.protobuf.AbstractParser<SearchCasesResponse>() {
        @java.lang.Override
        public SearchCasesResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<SearchCasesResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<SearchCasesResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.support.v2beta.SearchCasesResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,820 | java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/ListEnginesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1beta/engine_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1beta;
/**
*
*
* <pre>
* Response message for
* [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.ListEnginesResponse}
*/
public final class ListEnginesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.ListEnginesResponse)
ListEnginesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEnginesResponse.newBuilder() to construct.
private ListEnginesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEnginesResponse() {
engines_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEnginesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEnginesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEnginesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse.class,
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse.Builder.class);
}
public static final int ENGINES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.discoveryengine.v1beta.Engine> engines_;
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.discoveryengine.v1beta.Engine> getEnginesList() {
return engines_;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.discoveryengine.v1beta.EngineOrBuilder>
getEnginesOrBuilderList() {
return engines_;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
@java.lang.Override
public int getEnginesCount() {
return engines_.size();
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.Engine getEngines(int index) {
return engines_.get(index);
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.EngineOrBuilder getEnginesOrBuilder(int index) {
return engines_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < engines_.size(); i++) {
output.writeMessage(1, engines_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < engines_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, engines_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.ListEnginesResponse)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse other =
(com.google.cloud.discoveryengine.v1beta.ListEnginesResponse) obj;
if (!getEnginesList().equals(other.getEnginesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getEnginesCount() > 0) {
hash = (37 * hash) + ENGINES_FIELD_NUMBER;
hash = (53 * hash) + getEnginesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [EngineService.ListEngines][google.cloud.discoveryengine.v1beta.EngineService.ListEngines]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.ListEnginesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.ListEnginesResponse)
com.google.cloud.discoveryengine.v1beta.ListEnginesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEnginesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEnginesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse.class,
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1beta.ListEnginesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (enginesBuilder_ == null) {
engines_ = java.util.Collections.emptyList();
} else {
engines_ = null;
enginesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1beta.EngineServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEnginesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.ListEnginesResponse getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1beta.ListEnginesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.ListEnginesResponse build() {
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.ListEnginesResponse buildPartial() {
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse result =
new com.google.cloud.discoveryengine.v1beta.ListEnginesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.discoveryengine.v1beta.ListEnginesResponse result) {
if (enginesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
engines_ = java.util.Collections.unmodifiableList(engines_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.engines_ = engines_;
} else {
result.engines_ = enginesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.discoveryengine.v1beta.ListEnginesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1beta.ListEnginesResponse) {
return mergeFrom((com.google.cloud.discoveryengine.v1beta.ListEnginesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1beta.ListEnginesResponse other) {
if (other == com.google.cloud.discoveryengine.v1beta.ListEnginesResponse.getDefaultInstance())
return this;
if (enginesBuilder_ == null) {
if (!other.engines_.isEmpty()) {
if (engines_.isEmpty()) {
engines_ = other.engines_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureEnginesIsMutable();
engines_.addAll(other.engines_);
}
onChanged();
}
} else {
if (!other.engines_.isEmpty()) {
if (enginesBuilder_.isEmpty()) {
enginesBuilder_.dispose();
enginesBuilder_ = null;
engines_ = other.engines_;
bitField0_ = (bitField0_ & ~0x00000001);
enginesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getEnginesFieldBuilder()
: null;
} else {
enginesBuilder_.addAllMessages(other.engines_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.discoveryengine.v1beta.Engine m =
input.readMessage(
com.google.cloud.discoveryengine.v1beta.Engine.parser(), extensionRegistry);
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(m);
} else {
enginesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.discoveryengine.v1beta.Engine> engines_ =
java.util.Collections.emptyList();
private void ensureEnginesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
engines_ =
new java.util.ArrayList<com.google.cloud.discoveryengine.v1beta.Engine>(engines_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1beta.Engine,
com.google.cloud.discoveryengine.v1beta.Engine.Builder,
com.google.cloud.discoveryengine.v1beta.EngineOrBuilder>
enginesBuilder_;
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1beta.Engine> getEnginesList() {
if (enginesBuilder_ == null) {
return java.util.Collections.unmodifiableList(engines_);
} else {
return enginesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public int getEnginesCount() {
if (enginesBuilder_ == null) {
return engines_.size();
} else {
return enginesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1beta.Engine getEngines(int index) {
if (enginesBuilder_ == null) {
return engines_.get(index);
} else {
return enginesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder setEngines(int index, com.google.cloud.discoveryengine.v1beta.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.set(index, value);
onChanged();
} else {
enginesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder setEngines(
int index, com.google.cloud.discoveryengine.v1beta.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.set(index, builderForValue.build());
onChanged();
} else {
enginesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder addEngines(com.google.cloud.discoveryengine.v1beta.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.add(value);
onChanged();
} else {
enginesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder addEngines(int index, com.google.cloud.discoveryengine.v1beta.Engine value) {
if (enginesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureEnginesIsMutable();
engines_.add(index, value);
onChanged();
} else {
enginesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder addEngines(
com.google.cloud.discoveryengine.v1beta.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(builderForValue.build());
onChanged();
} else {
enginesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder addEngines(
int index, com.google.cloud.discoveryengine.v1beta.Engine.Builder builderForValue) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.add(index, builderForValue.build());
onChanged();
} else {
enginesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder addAllEngines(
java.lang.Iterable<? extends com.google.cloud.discoveryengine.v1beta.Engine> values) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, engines_);
onChanged();
} else {
enginesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder clearEngines() {
if (enginesBuilder_ == null) {
engines_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
enginesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public Builder removeEngines(int index) {
if (enginesBuilder_ == null) {
ensureEnginesIsMutable();
engines_.remove(index);
onChanged();
} else {
enginesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1beta.Engine.Builder getEnginesBuilder(int index) {
return getEnginesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1beta.EngineOrBuilder getEnginesOrBuilder(int index) {
if (enginesBuilder_ == null) {
return engines_.get(index);
} else {
return enginesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public java.util.List<? extends com.google.cloud.discoveryengine.v1beta.EngineOrBuilder>
getEnginesOrBuilderList() {
if (enginesBuilder_ != null) {
return enginesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(engines_);
}
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1beta.Engine.Builder addEnginesBuilder() {
return getEnginesFieldBuilder()
.addBuilder(com.google.cloud.discoveryengine.v1beta.Engine.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public com.google.cloud.discoveryengine.v1beta.Engine.Builder addEnginesBuilder(int index) {
return getEnginesFieldBuilder()
.addBuilder(index, com.google.cloud.discoveryengine.v1beta.Engine.getDefaultInstance());
}
/**
*
*
* <pre>
* All the customer's [Engine][google.cloud.discoveryengine.v1beta.Engine]s.
* </pre>
*
* <code>repeated .google.cloud.discoveryengine.v1beta.Engine engines = 1;</code>
*/
public java.util.List<com.google.cloud.discoveryengine.v1beta.Engine.Builder>
getEnginesBuilderList() {
return getEnginesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1beta.Engine,
com.google.cloud.discoveryengine.v1beta.Engine.Builder,
com.google.cloud.discoveryengine.v1beta.EngineOrBuilder>
getEnginesFieldBuilder() {
if (enginesBuilder_ == null) {
enginesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.discoveryengine.v1beta.Engine,
com.google.cloud.discoveryengine.v1beta.Engine.Builder,
com.google.cloud.discoveryengine.v1beta.EngineOrBuilder>(
engines_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
engines_ = null;
}
return enginesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Not supported.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.ListEnginesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.ListEnginesResponse)
private static final com.google.cloud.discoveryengine.v1beta.ListEnginesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.ListEnginesResponse();
}
public static com.google.cloud.discoveryengine.v1beta.ListEnginesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListEnginesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListEnginesResponse>() {
@java.lang.Override
public ListEnginesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<ListEnginesResponse> parser() {
  return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; returns the shared parser.
public com.google.protobuf.Parser<ListEnginesResponse> getParserForType() {
  return PARSER;
}
@java.lang.Override
// Instance-level accessor required by the Message interface; returns the shared default instance.
public com.google.cloud.discoveryengine.v1beta.ListEnginesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/metastore/v1beta/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.metastore.v1beta;
/**
 *
 *
 * <pre>
 * Configuration information for a Kerberos principal.
 * </pre>
 *
 * Protobuf type {@code google.cloud.metastore.v1beta.KerberosConfig}
 */
public final class KerberosConfig extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.metastore.v1beta.KerberosConfig)
    KerberosConfigOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use KerberosConfig.newBuilder() to construct.
  private KerberosConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; proto3 string fields default to "".
  private KerberosConfig() {
    principal_ = "";
    krb5ConfigGcsUri_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  // Reflection hook used by the protobuf runtime to create fresh instances.
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new KerberosConfig();
  }

  // Descriptor for this message type, looked up from the generated file-level proto class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.metastore.v1beta.MetastoreProto
        .internal_static_google_cloud_metastore_v1beta_KerberosConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.metastore.v1beta.MetastoreProto
        .internal_static_google_cloud_metastore_v1beta_KerberosConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.metastore.v1beta.KerberosConfig.class,
            com.google.cloud.metastore.v1beta.KerberosConfig.Builder.class);
  }

  // Bit field tracking which message-typed fields were explicitly set (bit 0 = keytab).
  private int bitField0_;
  public static final int KEYTAB_FIELD_NUMBER = 1;
  private com.google.cloud.metastore.v1beta.Secret keytab_;

  /**
   *
   *
   * <pre>
   * A Kerberos keytab file that can be used to authenticate a service principal
   * with a Kerberos Key Distribution Center (KDC).
   * </pre>
   *
   * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
   *
   * @return Whether the keytab field is set.
   */
  @java.lang.Override
  public boolean hasKeytab() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * A Kerberos keytab file that can be used to authenticate a service principal
   * with a Kerberos Key Distribution Center (KDC).
   * </pre>
   *
   * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
   *
   * @return The keytab.
   */
  @java.lang.Override
  public com.google.cloud.metastore.v1beta.Secret getKeytab() {
    // Never returns null: falls back to the Secret default instance when unset.
    return keytab_ == null
        ? com.google.cloud.metastore.v1beta.Secret.getDefaultInstance()
        : keytab_;
  }

  /**
   *
   *
   * <pre>
   * A Kerberos keytab file that can be used to authenticate a service principal
   * with a Kerberos Key Distribution Center (KDC).
   * </pre>
   *
   * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.metastore.v1beta.SecretOrBuilder getKeytabOrBuilder() {
    return keytab_ == null
        ? com.google.cloud.metastore.v1beta.Secret.getDefaultInstance()
        : keytab_;
  }

  public static final int PRINCIPAL_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; lazily converted to String on first getPrincipal().
  @SuppressWarnings("serial")
  private volatile java.lang.Object principal_ = "";

  /**
   *
   *
   * <pre>
   * A Kerberos principal that exists in the both the keytab the KDC
   * to authenticate as. A typical principal is of the form
   * `primary/instance@REALM`, but there is no exact format.
   * </pre>
   *
   * <code>string principal = 2;</code>
   *
   * @return The principal.
   */
  @java.lang.Override
  public java.lang.String getPrincipal() {
    java.lang.Object ref = principal_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: decode the ByteString once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      principal_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A Kerberos principal that exists in the both the keytab the KDC
   * to authenticate as. A typical principal is of the form
   * `primary/instance@REALM`, but there is no exact format.
   * </pre>
   *
   * <code>string principal = 2;</code>
   *
   * @return The bytes for principal.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPrincipalBytes() {
    java.lang.Object ref = principal_;
    if (ref instanceof java.lang.String) {
      // Cache the UTF-8 encoding so repeated serialization does not re-encode.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      principal_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int KRB5_CONFIG_GCS_URI_FIELD_NUMBER = 3;
  // Same lazy String/ByteString representation as principal_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object krb5ConfigGcsUri_ = "";

  /**
   *
   *
   * <pre>
   * A Cloud Storage URI that specifies the path to a
   * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
   * although the file does not need to be named krb5.conf explicitly.
   * </pre>
   *
   * <code>string krb5_config_gcs_uri = 3;</code>
   *
   * @return The krb5ConfigGcsUri.
   */
  @java.lang.Override
  public java.lang.String getKrb5ConfigGcsUri() {
    java.lang.Object ref = krb5ConfigGcsUri_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      krb5ConfigGcsUri_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A Cloud Storage URI that specifies the path to a
   * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
   * although the file does not need to be named krb5.conf explicitly.
   * </pre>
   *
   * <code>string krb5_config_gcs_uri = 3;</code>
   *
   * @return The bytes for krb5ConfigGcsUri.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getKrb5ConfigGcsUriBytes() {
    java.lang.Object ref = krb5ConfigGcsUri_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      krb5ConfigGcsUri_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  // Serializes set fields in field-number order; proto3 strings are skipped when empty.
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getKeytab());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(principal_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, principal_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(krb5ConfigGcsUri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, krb5ConfigGcsUri_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  // Computes (and memoizes) the serialized byte size; mirrors the field checks in writeTo.
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getKeytab());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(principal_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, principal_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(krb5ConfigGcsUri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, krb5ConfigGcsUri_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  // Value equality over keytab presence/value, both string fields, and unknown fields.
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.metastore.v1beta.KerberosConfig)) {
      return super.equals(obj);
    }
    com.google.cloud.metastore.v1beta.KerberosConfig other =
        (com.google.cloud.metastore.v1beta.KerberosConfig) obj;
    if (hasKeytab() != other.hasKeytab()) return false;
    if (hasKeytab()) {
      if (!getKeytab().equals(other.getKeytab())) return false;
    }
    if (!getPrincipal().equals(other.getPrincipal())) return false;
    if (!getKrb5ConfigGcsUri().equals(other.getKrb5ConfigGcsUri())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  // Standard protobuf-generated hash: descriptor plus each set field, memoized after first call.
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasKeytab()) {
      hash = (37 * hash) + KEYTAB_FIELD_NUMBER;
      hash = (53 * hash) + getKeytab().hashCode();
    }
    hash = (37 * hash) + PRINCIPAL_FIELD_NUMBER;
    hash = (53 * hash) + getPrincipal().hashCode();
    hash = (37 * hash) + KRB5_CONFIG_GCS_URI_FIELD_NUMBER;
    hash = (53 * hash) + getKrb5ConfigGcsUri().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // ---- Static parse entry points; all delegate to the shared PARSER or the runtime helpers. ----

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a builder pre-populated with the given message's field values.
  public static Builder newBuilder(com.google.cloud.metastore.v1beta.KerberosConfig prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoid copying field values when this is the all-default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Configuration information for a Kerberos principal.
   * </pre>
   *
   * Protobuf type {@code google.cloud.metastore.v1beta.KerberosConfig}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.metastore.v1beta.KerberosConfig)
      com.google.cloud.metastore.v1beta.KerberosConfigOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_KerberosConfig_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_KerberosConfig_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.metastore.v1beta.KerberosConfig.class,
              com.google.cloud.metastore.v1beta.KerberosConfig.Builder.class);
    }

    // Construct using com.google.cloud.metastore.v1beta.KerberosConfig.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-field builders when the runtime requires it (e.g. for
    // change-notification propagation to a parent builder).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getKeytabFieldBuilder();
      }
    }

    @java.lang.Override
    // Resets every field to its default and clears all "field set" bits.
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      keytab_ = null;
      if (keytabBuilder_ != null) {
        keytabBuilder_.dispose();
        keytabBuilder_ = null;
      }
      principal_ = "";
      krb5ConfigGcsUri_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.metastore.v1beta.MetastoreProto
          .internal_static_google_cloud_metastore_v1beta_KerberosConfig_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.metastore.v1beta.KerberosConfig getDefaultInstanceForType() {
      return com.google.cloud.metastore.v1beta.KerberosConfig.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.metastore.v1beta.KerberosConfig build() {
      com.google.cloud.metastore.v1beta.KerberosConfig result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.metastore.v1beta.KerberosConfig buildPartial() {
      com.google.cloud.metastore.v1beta.KerberosConfig result =
          new com.google.cloud.metastore.v1beta.KerberosConfig(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set builder fields into the result and translates the builder's bits
    // into the message's bitField0_ (only the keytab bit is tracked on the message).
    private void buildPartial0(com.google.cloud.metastore.v1beta.KerberosConfig result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.keytab_ = keytabBuilder_ == null ? keytab_ : keytabBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.principal_ = principal_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.krb5ConfigGcsUri_ = krb5ConfigGcsUri_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    // Dispatches to the type-specific merge when possible, else to generic descriptor-based merge.
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.metastore.v1beta.KerberosConfig) {
        return mergeFrom((com.google.cloud.metastore.v1beta.KerberosConfig) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges non-default fields of `other` into this builder (last-writer-wins for strings,
    // recursive merge for the keytab sub-message).
    public Builder mergeFrom(com.google.cloud.metastore.v1beta.KerberosConfig other) {
      if (other == com.google.cloud.metastore.v1beta.KerberosConfig.getDefaultInstance())
        return this;
      if (other.hasKeytab()) {
        mergeKeytab(other.getKeytab());
      }
      if (!other.getPrincipal().isEmpty()) {
        principal_ = other.principal_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getKrb5ConfigGcsUri().isEmpty()) {
        krb5ConfigGcsUri_ = other.krb5ConfigGcsUri_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    // Wire-format parse loop: reads tags until EOF (tag 0) or an end-group tag.
    // Tag values: 10 = field 1 (keytab, length-delimited), 18 = field 2 (principal),
    // 26 = field 3 (krb5_config_gcs_uri).
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getKeytabFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                principal_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                krb5ConfigGcsUri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Builder-side bit field: bit 0 = keytab, bit 1 = principal, bit 2 = krb5_config_gcs_uri.
    private int bitField0_;

    private com.google.cloud.metastore.v1beta.Secret keytab_;
    // Lazily-created helper that manages the keytab sub-message as a nested builder.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.metastore.v1beta.Secret,
            com.google.cloud.metastore.v1beta.Secret.Builder,
            com.google.cloud.metastore.v1beta.SecretOrBuilder>
        keytabBuilder_;

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     *
     * @return Whether the keytab field is set.
     */
    public boolean hasKeytab() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     *
     * @return The keytab.
     */
    public com.google.cloud.metastore.v1beta.Secret getKeytab() {
      if (keytabBuilder_ == null) {
        return keytab_ == null
            ? com.google.cloud.metastore.v1beta.Secret.getDefaultInstance()
            : keytab_;
      } else {
        return keytabBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     */
    public Builder setKeytab(com.google.cloud.metastore.v1beta.Secret value) {
      if (keytabBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        keytab_ = value;
      } else {
        keytabBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     */
    public Builder setKeytab(com.google.cloud.metastore.v1beta.Secret.Builder builderForValue) {
      if (keytabBuilder_ == null) {
        keytab_ = builderForValue.build();
      } else {
        keytabBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     */
    public Builder mergeKeytab(com.google.cloud.metastore.v1beta.Secret value) {
      if (keytabBuilder_ == null) {
        // Merge field-by-field only when an existing non-default value is present;
        // otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000001) != 0)
            && keytab_ != null
            && keytab_ != com.google.cloud.metastore.v1beta.Secret.getDefaultInstance()) {
          getKeytabBuilder().mergeFrom(value);
        } else {
          keytab_ = value;
        }
      } else {
        keytabBuilder_.mergeFrom(value);
      }
      if (keytab_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     */
    public Builder clearKeytab() {
      bitField0_ = (bitField0_ & ~0x00000001);
      keytab_ = null;
      if (keytabBuilder_ != null) {
        keytabBuilder_.dispose();
        keytabBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     */
    public com.google.cloud.metastore.v1beta.Secret.Builder getKeytabBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getKeytabFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     */
    public com.google.cloud.metastore.v1beta.SecretOrBuilder getKeytabOrBuilder() {
      if (keytabBuilder_ != null) {
        return keytabBuilder_.getMessageOrBuilder();
      } else {
        return keytab_ == null
            ? com.google.cloud.metastore.v1beta.Secret.getDefaultInstance()
            : keytab_;
      }
    }

    /**
     *
     *
     * <pre>
     * A Kerberos keytab file that can be used to authenticate a service principal
     * with a Kerberos Key Distribution Center (KDC).
     * </pre>
     *
     * <code>.google.cloud.metastore.v1beta.Secret keytab = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.metastore.v1beta.Secret,
            com.google.cloud.metastore.v1beta.Secret.Builder,
            com.google.cloud.metastore.v1beta.SecretOrBuilder>
        getKeytabFieldBuilder() {
      if (keytabBuilder_ == null) {
        // Once the field builder exists it owns the value; the plain field is cleared.
        keytabBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.metastore.v1beta.Secret,
                com.google.cloud.metastore.v1beta.Secret.Builder,
                com.google.cloud.metastore.v1beta.SecretOrBuilder>(
                getKeytab(), getParentForChildren(), isClean());
        keytab_ = null;
      }
      return keytabBuilder_;
    }

    private java.lang.Object principal_ = "";

    /**
     *
     *
     * <pre>
     * A Kerberos principal that exists in the both the keytab the KDC
     * to authenticate as. A typical principal is of the form
     * `primary/instance@REALM`, but there is no exact format.
     * </pre>
     *
     * <code>string principal = 2;</code>
     *
     * @return The principal.
     */
    public java.lang.String getPrincipal() {
      java.lang.Object ref = principal_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        principal_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A Kerberos principal that exists in the both the keytab the KDC
     * to authenticate as. A typical principal is of the form
     * `primary/instance@REALM`, but there is no exact format.
     * </pre>
     *
     * <code>string principal = 2;</code>
     *
     * @return The bytes for principal.
     */
    public com.google.protobuf.ByteString getPrincipalBytes() {
      java.lang.Object ref = principal_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        principal_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A Kerberos principal that exists in the both the keytab the KDC
     * to authenticate as. A typical principal is of the form
     * `primary/instance@REALM`, but there is no exact format.
     * </pre>
     *
     * <code>string principal = 2;</code>
     *
     * @param value The principal to set.
     * @return This builder for chaining.
     */
    public Builder setPrincipal(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      principal_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Kerberos principal that exists in the both the keytab the KDC
     * to authenticate as. A typical principal is of the form
     * `primary/instance@REALM`, but there is no exact format.
     * </pre>
     *
     * <code>string principal = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPrincipal() {
      principal_ = getDefaultInstance().getPrincipal();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Kerberos principal that exists in the both the keytab the KDC
     * to authenticate as. A typical principal is of the form
     * `primary/instance@REALM`, but there is no exact format.
     * </pre>
     *
     * <code>string principal = 2;</code>
     *
     * @param value The bytes for principal to set.
     * @return This builder for chaining.
     */
    public Builder setPrincipalBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      principal_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    private java.lang.Object krb5ConfigGcsUri_ = "";

    /**
     *
     *
     * <pre>
     * A Cloud Storage URI that specifies the path to a
     * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
     * although the file does not need to be named krb5.conf explicitly.
     * </pre>
     *
     * <code>string krb5_config_gcs_uri = 3;</code>
     *
     * @return The krb5ConfigGcsUri.
     */
    public java.lang.String getKrb5ConfigGcsUri() {
      java.lang.Object ref = krb5ConfigGcsUri_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        krb5ConfigGcsUri_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A Cloud Storage URI that specifies the path to a
     * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
     * although the file does not need to be named krb5.conf explicitly.
     * </pre>
     *
     * <code>string krb5_config_gcs_uri = 3;</code>
     *
     * @return The bytes for krb5ConfigGcsUri.
     */
    public com.google.protobuf.ByteString getKrb5ConfigGcsUriBytes() {
      java.lang.Object ref = krb5ConfigGcsUri_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        krb5ConfigGcsUri_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * A Cloud Storage URI that specifies the path to a
     * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
     * although the file does not need to be named krb5.conf explicitly.
     * </pre>
     *
     * <code>string krb5_config_gcs_uri = 3;</code>
     *
     * @param value The krb5ConfigGcsUri to set.
     * @return This builder for chaining.
     */
    public Builder setKrb5ConfigGcsUri(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      krb5ConfigGcsUri_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Cloud Storage URI that specifies the path to a
     * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
     * although the file does not need to be named krb5.conf explicitly.
     * </pre>
     *
     * <code>string krb5_config_gcs_uri = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearKrb5ConfigGcsUri() {
      krb5ConfigGcsUri_ = getDefaultInstance().getKrb5ConfigGcsUri();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * A Cloud Storage URI that specifies the path to a
     * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
     * although the file does not need to be named krb5.conf explicitly.
     * </pre>
     *
     * <code>string krb5_config_gcs_uri = 3;</code>
     *
     * @param value The bytes for krb5ConfigGcsUri to set.
     * @return This builder for chaining.
     */
    public Builder setKrb5ConfigGcsUriBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      krb5ConfigGcsUri_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.metastore.v1beta.KerberosConfig)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.metastore.v1beta.KerberosConfig)
  // Singleton default (all-fields-unset) instance for this message type.
  private static final com.google.cloud.metastore.v1beta.KerberosConfig DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.metastore.v1beta.KerberosConfig();
  }

  public static com.google.cloud.metastore.v1beta.KerberosConfig getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Stateless parser shared by all parseFrom(...) overloads; attaches the partially-built
  // message to any parse exception.
  private static final com.google.protobuf.Parser<KerberosConfig> PARSER =
      new com.google.protobuf.AbstractParser<KerberosConfig>() {
        @java.lang.Override
        public KerberosConfig parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<KerberosConfig> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<KerberosConfig> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.metastore.v1beta.KerberosConfig getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,113 | java-monitoring/google-cloud-monitoring/src/main/java/com/google/cloud/monitoring/v3/SnoozeServiceClient.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.monitoring.v3;
import com.google.api.core.ApiFuture;
import com.google.api.core.ApiFutures;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.paging.AbstractFixedSizeCollection;
import com.google.api.gax.paging.AbstractPage;
import com.google.api.gax.paging.AbstractPagedListResponse;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.monitoring.v3.stub.SnoozeServiceStub;
import com.google.cloud.monitoring.v3.stub.SnoozeServiceStubSettings;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.monitoring.v3.CreateSnoozeRequest;
import com.google.monitoring.v3.GetSnoozeRequest;
import com.google.monitoring.v3.ListSnoozesRequest;
import com.google.monitoring.v3.ListSnoozesResponse;
import com.google.monitoring.v3.ProjectName;
import com.google.monitoring.v3.Snooze;
import com.google.monitoring.v3.SnoozeName;
import com.google.monitoring.v3.UpdateSnoozeRequest;
import com.google.protobuf.FieldMask;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Service Description: The SnoozeService API is used to temporarily prevent an alert policy from
* generating alerts. A Snooze is a description of the criteria under which one or more alert
* policies should not fire alerts for the specified duration.
*
* <p>This class provides the ability to make remote calls to the backing service through method
* calls that map to API methods. Sample code to get started:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* ProjectName parent = ProjectName.of("[PROJECT]");
* Snooze snooze = Snooze.newBuilder().build();
* Snooze response = snoozeServiceClient.createSnooze(parent, snooze);
* }
* }</pre>
*
* <p>Note: close() needs to be called on the SnoozeServiceClient object to clean up resources such
* as threads. In the example above, try-with-resources is used, which automatically calls close().
*
* <table>
* <caption>Methods</caption>
* <tr>
* <th>Method</th>
* <th>Description</th>
* <th>Method Variants</th>
* </tr>
* <tr>
* <td><p> CreateSnooze</td>
* <td><p> Creates a `Snooze` that will prevent alerts, which match the provided criteria, from being opened. The `Snooze` applies for a specific time interval.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> createSnooze(CreateSnoozeRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> createSnooze(ProjectName parent, Snooze snooze)
* <li><p> createSnooze(String parent, Snooze snooze)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> createSnoozeCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> ListSnoozes</td>
* <td><p> Lists the `Snooze`s associated with a project. Can optionally pass in `filter`, which specifies predicates to match `Snooze`s.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> listSnoozes(ListSnoozesRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> listSnoozes(ProjectName parent)
* <li><p> listSnoozes(String parent)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> listSnoozesPagedCallable()
* <li><p> listSnoozesCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> GetSnooze</td>
* <td><p> Retrieves a `Snooze` by `name`.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> getSnooze(GetSnoozeRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> getSnooze(SnoozeName name)
* <li><p> getSnooze(String name)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> getSnoozeCallable()
* </ul>
* </td>
* </tr>
* <tr>
* <td><p> UpdateSnooze</td>
* <td><p> Updates a `Snooze`, identified by its `name`, with the parameters in the given `Snooze` object.</td>
* <td>
* <p>Request object method variants only take one parameter, a request object, which must be constructed before the call.</p>
* <ul>
* <li><p> updateSnooze(UpdateSnoozeRequest request)
* </ul>
* <p>"Flattened" method variants have converted the fields of the request object into function parameters to enable multiple ways to call the same method.</p>
* <ul>
* <li><p> updateSnooze(Snooze snooze, FieldMask updateMask)
* </ul>
* <p>Callable method variants take no parameters and return an immutable API callable object, which can be used to initiate calls to the service.</p>
* <ul>
* <li><p> updateSnoozeCallable()
* </ul>
* </td>
* </tr>
* </table>
*
* <p>See the individual methods for example code.
*
* <p>Many parameters require resource names to be formatted in a particular way. To assist with
* these names, this class includes a format method for each type of name, and additionally a parse
* method to extract the individual identifiers contained within names that are returned.
*
* <p>This class can be customized by passing in a custom instance of SnoozeServiceSettings to
* create(). For example:
*
* <p>To customize credentials:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* SnoozeServiceSettings snoozeServiceSettings =
* SnoozeServiceSettings.newBuilder()
* .setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
* .build();
* SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create(snoozeServiceSettings);
* }</pre>
*
* <p>To customize the endpoint:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* SnoozeServiceSettings snoozeServiceSettings =
* SnoozeServiceSettings.newBuilder().setEndpoint(myEndpoint).build();
* SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create(snoozeServiceSettings);
* }</pre>
*
* <p>Please refer to the GitHub repository's samples for more quickstart code snippets.
*/
@Generated("by gapic-generator-java")
public class SnoozeServiceClient implements BackgroundResource {
private final SnoozeServiceSettings settings;
private final SnoozeServiceStub stub;
  /** Constructs an instance of SnoozeServiceClient with default settings. */
  public static final SnoozeServiceClient create() throws IOException {
    return create(SnoozeServiceSettings.newBuilder().build());
  }
  /**
   * Constructs an instance of SnoozeServiceClient, using the given settings. The channels are
   * created based on the settings passed in, or defaults for any settings that are not set.
   */
  public static final SnoozeServiceClient create(SnoozeServiceSettings settings)
      throws IOException {
    return new SnoozeServiceClient(settings);
  }
  /**
   * Constructs an instance of SnoozeServiceClient, using the given stub for making calls. This is
   * for advanced usage - prefer using create(SnoozeServiceSettings).
   */
  public static final SnoozeServiceClient create(SnoozeServiceStub stub) {
    return new SnoozeServiceClient(stub);
  }
  /**
   * Constructs an instance of SnoozeServiceClient, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected SnoozeServiceClient(SnoozeServiceSettings settings) throws IOException {
    this.settings = settings;
    // The concrete transport stub is manufactured by the stub settings derived from `settings`.
    this.stub = ((SnoozeServiceStubSettings) settings.getStubSettings()).createStub();
  }
  // When built directly from a stub there are no client settings; getSettings() returns null.
  protected SnoozeServiceClient(SnoozeServiceStub stub) {
    this.settings = null;
    this.stub = stub;
  }
  /** Returns the settings this client was created with, or null if built from a raw stub. */
  public final SnoozeServiceSettings getSettings() {
    return settings;
  }
  /** Returns the underlying transport stub backing this client. */
  public SnoozeServiceStub getStub() {
    return stub;
  }
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a `Snooze` that will prevent alerts, which match the provided criteria, from being
* opened. The `Snooze` applies for a specific time interval.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* ProjectName parent = ProjectName.of("[PROJECT]");
* Snooze snooze = Snooze.newBuilder().build();
* Snooze response = snoozeServiceClient.createSnooze(parent, snooze);
* }
* }</pre>
*
* @param parent Required. The [project](https://cloud.google.com/monitoring/api/v3#project_name)
* in which a `Snooze` should be created. The format is:
* <p>projects/[PROJECT_ID_OR_NUMBER]
* @param snooze Required. The `Snooze` to create. Omit the `name` field, as it will be filled in
* by the API.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze createSnooze(ProjectName parent, Snooze snooze) {
CreateSnoozeRequest request =
CreateSnoozeRequest.newBuilder()
.setParent(parent == null ? null : parent.toString())
.setSnooze(snooze)
.build();
return createSnooze(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a `Snooze` that will prevent alerts, which match the provided criteria, from being
* opened. The `Snooze` applies for a specific time interval.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* String parent = ProjectName.of("[PROJECT]").toString();
* Snooze snooze = Snooze.newBuilder().build();
* Snooze response = snoozeServiceClient.createSnooze(parent, snooze);
* }
* }</pre>
*
* @param parent Required. The [project](https://cloud.google.com/monitoring/api/v3#project_name)
* in which a `Snooze` should be created. The format is:
* <p>projects/[PROJECT_ID_OR_NUMBER]
* @param snooze Required. The `Snooze` to create. Omit the `name` field, as it will be filled in
* by the API.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze createSnooze(String parent, Snooze snooze) {
CreateSnoozeRequest request =
CreateSnoozeRequest.newBuilder().setParent(parent).setSnooze(snooze).build();
return createSnooze(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a `Snooze` that will prevent alerts, which match the provided criteria, from being
* opened. The `Snooze` applies for a specific time interval.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* CreateSnoozeRequest request =
* CreateSnoozeRequest.newBuilder()
* .setParent(ProjectName.of("[PROJECT]").toString())
* .setSnooze(Snooze.newBuilder().build())
* .build();
* Snooze response = snoozeServiceClient.createSnooze(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze createSnooze(CreateSnoozeRequest request) {
return createSnoozeCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Creates a `Snooze` that will prevent alerts, which match the provided criteria, from being
* opened. The `Snooze` applies for a specific time interval.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* CreateSnoozeRequest request =
* CreateSnoozeRequest.newBuilder()
* .setParent(ProjectName.of("[PROJECT]").toString())
* .setSnooze(Snooze.newBuilder().build())
* .build();
* ApiFuture<Snooze> future = snoozeServiceClient.createSnoozeCallable().futureCall(request);
* // Do something.
* Snooze response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<CreateSnoozeRequest, Snooze> createSnoozeCallable() {
return stub.createSnoozeCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the `Snooze`s associated with a project. Can optionally pass in `filter`, which specifies
* predicates to match `Snooze`s.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* ProjectName parent = ProjectName.of("[PROJECT]");
* for (Snooze element : snoozeServiceClient.listSnoozes(parent).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param parent Required. The [project](https://cloud.google.com/monitoring/api/v3#project_name)
* whose `Snooze`s should be listed. The format is:
* <p>projects/[PROJECT_ID_OR_NUMBER]
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListSnoozesPagedResponse listSnoozes(ProjectName parent) {
ListSnoozesRequest request =
ListSnoozesRequest.newBuilder()
.setParent(parent == null ? null : parent.toString())
.build();
return listSnoozes(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the `Snooze`s associated with a project. Can optionally pass in `filter`, which specifies
* predicates to match `Snooze`s.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* String parent = ProjectName.of("[PROJECT]").toString();
* for (Snooze element : snoozeServiceClient.listSnoozes(parent).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param parent Required. The [project](https://cloud.google.com/monitoring/api/v3#project_name)
* whose `Snooze`s should be listed. The format is:
* <p>projects/[PROJECT_ID_OR_NUMBER]
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListSnoozesPagedResponse listSnoozes(String parent) {
ListSnoozesRequest request = ListSnoozesRequest.newBuilder().setParent(parent).build();
return listSnoozes(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the `Snooze`s associated with a project. Can optionally pass in `filter`, which specifies
* predicates to match `Snooze`s.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* ListSnoozesRequest request =
* ListSnoozesRequest.newBuilder()
* .setParent(ProjectName.of("[PROJECT]").toString())
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* for (Snooze element : snoozeServiceClient.listSnoozes(request).iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final ListSnoozesPagedResponse listSnoozes(ListSnoozesRequest request) {
return listSnoozesPagedCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the `Snooze`s associated with a project. Can optionally pass in `filter`, which specifies
* predicates to match `Snooze`s.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* ListSnoozesRequest request =
* ListSnoozesRequest.newBuilder()
* .setParent(ProjectName.of("[PROJECT]").toString())
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* ApiFuture<Snooze> future = snoozeServiceClient.listSnoozesPagedCallable().futureCall(request);
* // Do something.
* for (Snooze element : future.get().iterateAll()) {
* // doThingsWith(element);
* }
* }
* }</pre>
*/
public final UnaryCallable<ListSnoozesRequest, ListSnoozesPagedResponse>
listSnoozesPagedCallable() {
return stub.listSnoozesPagedCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Lists the `Snooze`s associated with a project. Can optionally pass in `filter`, which specifies
* predicates to match `Snooze`s.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* ListSnoozesRequest request =
* ListSnoozesRequest.newBuilder()
* .setParent(ProjectName.of("[PROJECT]").toString())
* .setFilter("filter-1274492040")
* .setPageSize(883849137)
* .setPageToken("pageToken873572522")
* .build();
* while (true) {
* ListSnoozesResponse response = snoozeServiceClient.listSnoozesCallable().call(request);
* for (Snooze element : response.getSnoozesList()) {
* // doThingsWith(element);
* }
* String nextPageToken = response.getNextPageToken();
* if (!Strings.isNullOrEmpty(nextPageToken)) {
* request = request.toBuilder().setPageToken(nextPageToken).build();
* } else {
* break;
* }
* }
* }
* }</pre>
*/
public final UnaryCallable<ListSnoozesRequest, ListSnoozesResponse> listSnoozesCallable() {
return stub.listSnoozesCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves a `Snooze` by `name`.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* SnoozeName name = SnoozeName.of("[PROJECT]", "[SNOOZE]");
* Snooze response = snoozeServiceClient.getSnooze(name);
* }
* }</pre>
*
* @param name Required. The ID of the `Snooze` to retrieve. The format is:
* <p>projects/[PROJECT_ID_OR_NUMBER]/snoozes/[SNOOZE_ID]
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze getSnooze(SnoozeName name) {
GetSnoozeRequest request =
GetSnoozeRequest.newBuilder().setName(name == null ? null : name.toString()).build();
return getSnooze(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves a `Snooze` by `name`.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* String name = SnoozeName.of("[PROJECT]", "[SNOOZE]").toString();
* Snooze response = snoozeServiceClient.getSnooze(name);
* }
* }</pre>
*
* @param name Required. The ID of the `Snooze` to retrieve. The format is:
* <p>projects/[PROJECT_ID_OR_NUMBER]/snoozes/[SNOOZE_ID]
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze getSnooze(String name) {
GetSnoozeRequest request = GetSnoozeRequest.newBuilder().setName(name).build();
return getSnooze(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves a `Snooze` by `name`.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* GetSnoozeRequest request =
* GetSnoozeRequest.newBuilder()
* .setName(SnoozeName.of("[PROJECT]", "[SNOOZE]").toString())
* .build();
* Snooze response = snoozeServiceClient.getSnooze(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze getSnooze(GetSnoozeRequest request) {
return getSnoozeCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Retrieves a `Snooze` by `name`.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* GetSnoozeRequest request =
* GetSnoozeRequest.newBuilder()
* .setName(SnoozeName.of("[PROJECT]", "[SNOOZE]").toString())
* .build();
* ApiFuture<Snooze> future = snoozeServiceClient.getSnoozeCallable().futureCall(request);
* // Do something.
* Snooze response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<GetSnoozeRequest, Snooze> getSnoozeCallable() {
return stub.getSnoozeCallable();
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Updates a `Snooze`, identified by its `name`, with the parameters in the given `Snooze` object.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* Snooze snooze = Snooze.newBuilder().build();
* FieldMask updateMask = FieldMask.newBuilder().build();
* Snooze response = snoozeServiceClient.updateSnooze(snooze, updateMask);
* }
* }</pre>
*
* @param snooze Required. The `Snooze` to update. Must have the name field present.
* @param updateMask Required. The fields to update.
* <p>For each field listed in `update_mask`:
* <p>* If the `Snooze` object supplied in the `UpdateSnoozeRequest` has a value for that
* field, the value of the field in the existing `Snooze` will be set to the value of the
* field in the supplied `Snooze`. * If the field does not have a value in the supplied
* `Snooze`, the field in the existing `Snooze` is set to its default value.
* <p>Fields not listed retain their existing value.
* <p>The following are the field names that are accepted in `update_mask`:
* <p>* `display_name` * `interval.start_time` * `interval.end_time`
* <p>That said, the start time and end time of the `Snooze` determines which fields can
* legally be updated. Before attempting an update, users should consult the documentation for
* `UpdateSnoozeRequest`, which talks about which fields can be updated.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze updateSnooze(Snooze snooze, FieldMask updateMask) {
UpdateSnoozeRequest request =
UpdateSnoozeRequest.newBuilder().setSnooze(snooze).setUpdateMask(updateMask).build();
return updateSnooze(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Updates a `Snooze`, identified by its `name`, with the parameters in the given `Snooze` object.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* UpdateSnoozeRequest request =
* UpdateSnoozeRequest.newBuilder()
* .setSnooze(Snooze.newBuilder().build())
* .setUpdateMask(FieldMask.newBuilder().build())
* .build();
* Snooze response = snoozeServiceClient.updateSnooze(request);
* }
* }</pre>
*
* @param request The request object containing all of the parameters for the API call.
* @throws com.google.api.gax.rpc.ApiException if the remote call fails
*/
public final Snooze updateSnooze(UpdateSnoozeRequest request) {
return updateSnoozeCallable().call(request);
}
// AUTO-GENERATED DOCUMENTATION AND METHOD.
/**
* Updates a `Snooze`, identified by its `name`, with the parameters in the given `Snooze` object.
*
* <p>Sample code:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* try (SnoozeServiceClient snoozeServiceClient = SnoozeServiceClient.create()) {
* UpdateSnoozeRequest request =
* UpdateSnoozeRequest.newBuilder()
* .setSnooze(Snooze.newBuilder().build())
* .setUpdateMask(FieldMask.newBuilder().build())
* .build();
* ApiFuture<Snooze> future = snoozeServiceClient.updateSnoozeCallable().futureCall(request);
* // Do something.
* Snooze response = future.get();
* }
* }</pre>
*/
public final UnaryCallable<UpdateSnoozeRequest, Snooze> updateSnoozeCallable() {
return stub.updateSnoozeCallable();
}
  /** Closes the client by closing the underlying stub and its channels. */
  @Override
  public final void close() {
    stub.close();
  }
  // The remaining BackgroundResource lifecycle methods all delegate directly to the stub.
  @Override
  public void shutdown() {
    stub.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return stub.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return stub.isTerminated();
  }
  @Override
  public void shutdownNow() {
    stub.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return stub.awaitTermination(duration, unit);
  }
  /**
   * Paged response wrapper for {@code listSnoozes}; iterates lazily across pages (e.g. via
   * {@code iterateAll()}).
   */
  public static class ListSnoozesPagedResponse
      extends AbstractPagedListResponse<
          ListSnoozesRequest,
          ListSnoozesResponse,
          Snooze,
          ListSnoozesPage,
          ListSnoozesFixedSizeCollection> {
    /** Transforms a future first page into a future paged response. */
    public static ApiFuture<ListSnoozesPagedResponse> createAsync(
        PageContext<ListSnoozesRequest, ListSnoozesResponse, Snooze> context,
        ApiFuture<ListSnoozesResponse> futureResponse) {
      ApiFuture<ListSnoozesPage> futurePage =
          ListSnoozesPage.createEmptyPage().createPageAsync(context, futureResponse);
      // directExecutor: the wrapping transform is trivial, so run it on the completing thread.
      return ApiFutures.transform(
          futurePage, input -> new ListSnoozesPagedResponse(input), MoreExecutors.directExecutor());
    }
    private ListSnoozesPagedResponse(ListSnoozesPage page) {
      super(page, ListSnoozesFixedSizeCollection.createEmptyCollection());
    }
  }
/** A single page of ListSnoozes results; knows how to fetch the next page. */
public static class ListSnoozesPage
    extends AbstractPage<ListSnoozesRequest, ListSnoozesResponse, Snooze, ListSnoozesPage> {

  private ListSnoozesPage(
      PageContext<ListSnoozesRequest, ListSnoozesResponse, Snooze> context,
      ListSnoozesResponse response) {
    super(context, response);
  }

  // Sentinel page used as a factory for the first real page.
  private static ListSnoozesPage createEmptyPage() {
    return new ListSnoozesPage(null, null);
  }

  @Override
  protected ListSnoozesPage createPage(
      PageContext<ListSnoozesRequest, ListSnoozesResponse, Snooze> context,
      ListSnoozesResponse response) {
    return new ListSnoozesPage(context, response);
  }

  @Override
  public ApiFuture<ListSnoozesPage> createPageAsync(
      PageContext<ListSnoozesRequest, ListSnoozesResponse, Snooze> context,
      ApiFuture<ListSnoozesResponse> futureResponse) {
    return super.createPageAsync(context, futureResponse);
  }
}
/** Fixed-size grouping of ListSnoozes pages, for callers that want exact-size batches. */
public static class ListSnoozesFixedSizeCollection
    extends AbstractFixedSizeCollection<
        ListSnoozesRequest,
        ListSnoozesResponse,
        Snooze,
        ListSnoozesPage,
        ListSnoozesFixedSizeCollection> {

  private ListSnoozesFixedSizeCollection(List<ListSnoozesPage> pages, int collectionSize) {
    super(pages, collectionSize);
  }

  // Sentinel used as a factory before any page has been fetched.
  private static ListSnoozesFixedSizeCollection createEmptyCollection() {
    return new ListSnoozesFixedSizeCollection(null, 0);
  }

  @Override
  protected ListSnoozesFixedSizeCollection createCollection(
      List<ListSnoozesPage> pages, int collectionSize) {
    return new ListSnoozesFixedSizeCollection(pages, collectionSize);
  }
}
}
|
apache/hadoop-common | 36,949 | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.viewfs;
import static org.apache.hadoop.fs.viewfs.Constants.PERMISSION_555;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FsConstants;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Options.ChecksumOpt;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.local.LocalConfigKeys;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclUtil;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.viewfs.InodeTree.INode;
import org.apache.hadoop.fs.viewfs.InodeTree.INodeLink;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.Time;
/**
* ViewFs (extends the AbstractFileSystem interface) implements a client-side
* mount table. The viewFs file system is implemented completely in memory on
* the client side. The client-side mount table allows a client to provide a
* customized view of a file system namespace that is composed from
* one or more individual file systems (a localFs or Hdfs, S3fs, etc).
* For example one could have a mount table that provides links such as
* <ul>
* <li> /user -> hdfs://nnContainingUserDir/user
* <li> /project/foo -> hdfs://nnProject1/projects/foo
* <li> /project/bar -> hdfs://nnProject2/projects/bar
* <li> /tmp -> hdfs://nnTmp/privateTmpForUserXXX
* </ul>
*
* ViewFs is specified with the following URI: <b>viewfs:///</b>
* <p>
* To use viewfs one would typically set the default file system in the
 * config (i.e. fs.default.name = viewfs:///) along with the
* mount table config variables as described below.
*
* <p>
* <b> ** Config variables to specify the mount table entries ** </b>
* <p>
*
* The file system is initialized from the standard Hadoop config through
* config variables.
* See {@link FsConstants} for URI and Scheme constants;
* See {@link Constants} for config var constants;
* see {@link ConfigUtil} for convenient lib.
*
* <p>
* All the mount table config entries for view fs are prefixed by
* <b>fs.viewfs.mounttable.</b>
* For example the above example can be specified with the following
* config variables:
* <ul>
* <li> fs.viewfs.mounttable.default.link./user=
* hdfs://nnContainingUserDir/user
* <li> fs.viewfs.mounttable.default.link./project/foo=
* hdfs://nnProject1/projects/foo
* <li> fs.viewfs.mounttable.default.link./project/bar=
* hdfs://nnProject2/projects/bar
* <li> fs.viewfs.mounttable.default.link./tmp=
* hdfs://nnTmp/privateTmpForUserXXX
* </ul>
*
* The default mount table (when no authority is specified) is
* from config variables prefixed by <b>fs.viewFs.mounttable.default </b>
* The authority component of a URI can be used to specify a different mount
* table. For example,
* <ul>
* <li> viewfs://sanjayMountable/
* </ul>
* is initialized from fs.viewFs.mounttable.sanjayMountable.* config variables.
*
* <p>
* <b> **** Merge Mounts **** </b>(NOTE: merge mounts are not implemented yet.)
* <p>
*
 * One can also use "MergeMounts" to merge several directories (this is
 * sometimes called union-mounts or junction-mounts in the literature).
 * For example, if the home directories are stored on, say, two file systems
* (because they do not fit on one) then one could specify a mount
* entry such as following merges two dirs:
* <ul>
* <li> /user -> hdfs://nnUser1/user,hdfs://nnUser2/user
* </ul>
* Such a mergeLink can be specified with the following config var where ","
* is used as the separator for each of links to be merged:
* <ul>
* <li> fs.viewfs.mounttable.default.linkMerge./user=
* hdfs://nnUser1/user,hdfs://nnUser1/user
* </ul>
* A special case of the merge mount is where mount table's root is merged
* with the root (slash) of another file system:
* <ul>
* <li> fs.viewfs.mounttable.default.linkMergeSlash=hdfs://nn99/
* </ul>
 * In this case the root of the mount table is merged with the root of
* <b>hdfs://nn99/ </b>
*/
@InterfaceAudience.Public
@InterfaceStability.Evolving /*Evolving for a release,to be changed to Stable */
public class ViewFs extends AbstractFileSystem {
final long creationTime; // of the the mount table
final UserGroupInformation ugi; // the user/group of user who created mtable
final Configuration config;
InodeTree<AbstractFileSystem> fsState; // the fs state; ie the mount table
Path homeDir = null;
/**
 * Builds the exception thrown when a mutating operation is attempted on an
 * internal (mount-table) directory, which is always read-only.
 *
 * @param operation name of the rejected operation
 * @param p path string on which the operation was attempted
 * @return an AccessControlException for the caller to throw
 */
static AccessControlException readOnlyMountTable(final String operation,
    final String p) {
  // Bug fix: the original concatenated "...operation=<op>Path=<p>" with no
  // separator, producing unreadable messages like "operation=deletePath=/x".
  return new AccessControlException(
      "InternalDir of ViewFileSystem is readonly; operation=" + operation
          + " Path=" + p);
}
/** Path-typed convenience overload; delegates to the String form. */
static AccessControlException readOnlyMountTable(final String operation,
    final Path p) {
  return readOnlyMountTable(operation, p.toString());
}
/** One entry of the client-side mount table: a source path and its target URI(s). */
static public class MountPoint {
  /** Source path of the mount within the viewfs namespace. */
  private final Path src;
  /** Target URI(s); more than one element implies a merge mount. */
  private final URI[] targets;

  MountPoint(Path srcPath, URI[] targetURIs) {
    this.src = srcPath;
    this.targets = targetURIs;
  }

  Path getSrc() {
    return src;
  }

  URI[] getTargets() {
    return targets;
  }
}
/**
 * Creates a ViewFs over the default mount table, i.e. with the canonical
 * viewfs URI ({@link FsConstants#VIEWFS_URI}).
 */
public ViewFs(final Configuration conf) throws IOException,
    URISyntaxException {
  this(FsConstants.VIEWFS_URI, conf);
}
/**
 * This constructor has the signature needed by
 * {@link AbstractFileSystem#createFileSystem(URI, Configuration)}.
 *
 * @param theUri which must be that of ViewFs
 * @param conf configuration holding the fs.viewfs.mounttable.* entries
 * @throws IOException if a target file system cannot be created
 * @throws URISyntaxException if a configured target URI is malformed
 */
ViewFs(final URI theUri, final Configuration conf) throws IOException,
    URISyntaxException {
  super(theUri, FsConstants.VIEWFS_SCHEME, false, -1);
  creationTime = Time.now();
  ugi = UserGroupInformation.getCurrentUser();
  config = conf;
  // Now build client side view (i.e. client side mount table) from config.
  // The URI authority selects which mount table's config entries are used.
  String authority = theUri.getAuthority();
  fsState = new InodeTree<AbstractFileSystem>(conf, authority) {

    // Leaf mount link: wrap the target fs in a ChRootedFs so paths under the
    // link are interpreted relative to the link's target directory.
    @Override
    protected
    AbstractFileSystem getTargetFileSystem(final URI uri)
      throws URISyntaxException, UnsupportedFileSystemException {
        String pathString = uri.getPath();
        if (pathString.isEmpty()) {
          pathString = "/";
        }
        return new ChRootedFs(
            AbstractFileSystem.createFileSystem(uri, config),
            new Path(pathString));
    }

    // Internal (non-link) directory of the mount table: served by the
    // read-only in-memory InternalDirOfViewFs implementation.
    @Override
    protected
    AbstractFileSystem getTargetFileSystem(
        final INodeDir<AbstractFileSystem> dir) throws URISyntaxException {
      return new InternalDirOfViewFs(dir, creationTime, ugi, getUri());
    }

    // Merge mounts are declared in the config format but not implemented yet.
    @Override
    protected
    AbstractFileSystem getTargetFileSystem(URI[] mergeFsURIList)
        throws URISyntaxException, UnsupportedFileSystemException {
      throw new UnsupportedFileSystemException("mergefs not implemented yet");
      // return MergeFs.createMergeFs(mergeFsURIList, config);
    }
  };
}
/** Server defaults for viewfs are taken from the local FS configuration keys. */
@Override
public FsServerDefaults getServerDefaults() throws IOException {
  return LocalConfigKeys.getServerDefaults();
}

/** viewfs URIs have no port component. */
@Override
public int getUriDefaultPort() {
  return -1;
}
/**
 * Lazily computes and caches the user's home directory. The prefix comes from
 * the mount table's homedir setting, defaulting to "/user".
 */
@Override
public Path getHomeDirectory() {
  if (homeDir != null) {
    return homeDir; // already computed; return the cached value
  }
  String base = fsState.getHomeDirPrefixValue();
  if (base == null) {
    base = "/user";
  }
  final String user = ugi.getShortUserName();
  // Avoid a double slash when the prefix is the root itself.
  final Path raw = base.equals("/")
      ? new Path(base + user)
      : new Path(base + "/" + user);
  homeDir = this.makeQualified(raw);
  return homeDir;
}
/**
 * Resolves a viewfs path to its fully-qualified path in the target file
 * system; internal mount-table directories resolve to themselves.
 */
@Override
public Path resolvePath(final Path f) throws FileNotFoundException,
        AccessControlException, UnresolvedLinkException, IOException {
  final InodeTree.ResolveResult<AbstractFileSystem> res;
  res = fsState.resolve(getUriPath(f), true);
  if (res.isInternalDir()) {
    return f;
  }
  return res.targetFileSystem.resolvePath(res.remainingPath);
}
/**
 * Creates a file by delegating to the resolved target file system.
 * Resolution uses resolveLastComponent=false so that attempting to create a
 * path directly inside the mount table is detected and rejected.
 */
@Override
public FSDataOutputStream createInternal(final Path f,
    final EnumSet<CreateFlag> flag, final FsPermission absolutePermission,
    final int bufferSize, final short replication, final long blockSize,
    final Progressable progress, final ChecksumOpt checksumOpt,
    final boolean createParent) throws AccessControlException,
    FileAlreadyExistsException, FileNotFoundException,
    ParentNotDirectoryException, UnsupportedFileSystemException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res;
  try {
    res = fsState.resolve(getUriPath(f), false);
  } catch (FileNotFoundException e) {
    if (createParent) {
      // Parent would have to be created inside the read-only mount table.
      throw readOnlyMountTable("create", f);
    } else {
      throw e;
    }
  }
  assert(res.remainingPath != null);
  return res.targetFileSystem.createInternal(res.remainingPath, flag,
      absolutePermission, bufferSize, replication,
      blockSize, progress, checksumOpt,
      createParent);
}
/**
 * Deletes a path via the target file system. Deleting an internal directory
 * or a mount link itself is refused — the mount table is read-only.
 */
@Override
public boolean delete(final Path f, final boolean recursive)
    throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  // If internal dir or target is a mount link (ie remainingPath is Slash)
  if (res.isInternalDir() || res.remainingPath == InodeTree.SlashPath) {
    throw new AccessControlException(
        "Cannot delete internal mount table directory: " + f);
  }
  return res.targetFileSystem.delete(res.remainingPath, recursive);
}
/** Delegates block-location lookup to the resolved target file system. */
@Override
public BlockLocation[] getFileBlockLocations(final Path f, final long start,
    final long len) throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  return
    res.targetFileSystem.getFileBlockLocations(res.remainingPath, start, len);
}

/** Delegates checksum computation to the resolved target file system. */
@Override
public FileChecksum getFileChecksum(final Path f)
    throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  return res.targetFileSystem.getFileChecksum(res.remainingPath);
}
/**
 * Returns the status of a path, rewrapped so its path is expressed relative
 * to the mount-table root rather than the target file system's root.
 */
@Override
public FileStatus getFileStatus(final Path f) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);

  //  FileStatus#getPath is a fully qualified path relative to the root of
  // target file system.
  // We need to change it to viewfs URI - relative to root of mount table.

  // The implementors of RawLocalFileSystem were trying to be very smart.
  // They implement FileStatus#getOwner lazily -- the object
  // returned is really a RawLocalFileSystem that expect the
  // FileStatus#getPath to be unchanged so that it can get owner when needed.
  // Hence we need to interpose a new ViewFsFileStatus that works around.

  FileStatus status =  res.targetFileSystem.getFileStatus(res.remainingPath);
  return new ViewFsFileStatus(status, this.makeQualified(f));
}

/** Checks access permissions on the resolved target path. */
@Override
public void access(Path path, FsAction mode) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.access(res.remainingPath, mode);
}
/** Like getFileStatus but does not follow a final-component mount link. */
@Override
public FileStatus getFileLinkStatus(final Path f)
   throws AccessControlException, FileNotFoundException,
   UnsupportedFileSystemException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), false); // do not follow mount link
  return res.targetFileSystem.getFileLinkStatus(res.remainingPath);
}

/** viewfs spans many file systems, so no single meaningful FsStatus exists. */
@Override
public FsStatus getFsStatus() throws AccessControlException,
    FileNotFoundException, IOException {
  return new FsStatus(0, 0, 0);
}
/**
 * Iterates the directory listing of the resolved target. For mount-link
 * targets, each FileStatus is rewrapped so its path is relative to the
 * mount-table root (same reasoning as in {@link #getFileStatus}).
 */
@Override
public RemoteIterator<FileStatus> listStatusIterator(final Path f)
  throws AccessControlException, FileNotFoundException,
  UnresolvedLinkException, IOException {
  final InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  final RemoteIterator<FileStatus> fsIter =
    res.targetFileSystem.listStatusIterator(res.remainingPath);
  if (res.isInternalDir()) {
    // Internal dirs already report viewfs-relative paths; no rewrap needed.
    return fsIter;
  }

  return new RemoteIterator<FileStatus>() {
    final RemoteIterator<FileStatus> myIter;
    final ChRootedFs targetFs;
    { // Init
      myIter = fsIter;
      targetFs = (ChRootedFs) res.targetFileSystem;
    }

    @Override
    public boolean hasNext() throws IOException {
      return myIter.hasNext();
    }

    @Override
    public FileStatus next() throws IOException {
      FileStatus status = myIter.next();
      // Strip the chroot prefix, then requalify under the mount point.
      String suffix = targetFs.stripOutRoot(status.getPath());
      return new ViewFsFileStatus(status, makeQualified(
          suffix.length() == 0 ? f : new Path(res.resolvedPath, suffix)));
    }
  };
}
/**
 * Lists a directory. For mount-link targets each status is rewrapped so its
 * path is relative to the mount-table root (see {@link #getFileStatus}).
 */
@Override
public FileStatus[] listStatus(final Path f) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(f), true);
  FileStatus[] statusLst = res.targetFileSystem.listStatus(res.remainingPath);
  if (res.isInternalDir()) {
    return statusLst; // internal dirs already report viewfs-relative paths
  }
  ChRootedFs targetFs = (ChRootedFs) res.targetFileSystem;
  for (int i = 0; i < statusLst.length; i++) {
    String suffix = targetFs.stripOutRoot(statusLst[i].getPath());
    Path qualified = this.makeQualified(
        suffix.length() == 0 ? f : new Path(res.resolvedPath, suffix));
    statusLst[i] = new ViewFsFileStatus(statusLst[i], qualified);
  }
  return statusLst;
}
/**
 * Creates a directory via the resolved target. resolveLastComponent=false
 * so that creating a directory at a mount point itself is caught upstream.
 */
@Override
public void mkdir(final Path dir, final FsPermission permission,
    final boolean createParent) throws AccessControlException,
    FileAlreadyExistsException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(dir), false);
  res.targetFileSystem.mkdir(res.remainingPath, permission, createParent);
}

/** Opens a file for reading via the resolved target file system. */
@Override
public FSDataInputStream open(final Path f, final int bufferSize)
    throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(f), true);
  return res.targetFileSystem.open(res.remainingPath, bufferSize);
}
/**
 * Renames src to dst. Renames are only supported when both endpoints resolve
 * to the same mount target; renaming a mount point itself is rejected.
 */
@Override
public void renameInternal(final Path src, final Path dst,
    final boolean overwrite) throws IOException, UnresolvedLinkException {
  // passing resolveLastComponet as false to catch renaming a mount point
  // itself we need to catch this as an internal operation and fail.
  InodeTree.ResolveResult<AbstractFileSystem> resSrc =
    fsState.resolve(getUriPath(src), false);

  if (resSrc.isInternalDir()) {
    throw new AccessControlException(
        "Cannot Rename within internal dirs of mount table: it is readOnly");
  }

  InodeTree.ResolveResult<AbstractFileSystem> resDst =
                            fsState.resolve(getUriPath(dst), false);
  if (resDst.isInternalDir()) {
    throw new AccessControlException(
        "Cannot Rename within internal dirs of mount table: it is readOnly");
  }

  /**
  // Alternate 1: renames within same file system - valid but we disallow
  // Alternate 2: (as described in next para - valid but we have disallowed it
  //
  // Note we compare the URIs. the URIs include the link targets.
  // hence we allow renames across mount links as long as the mount links
  // point to the same target.
  if (!resSrc.targetFileSystem.getUri().equals(
            resDst.targetFileSystem.getUri())) {
    throw new IOException("Renames across Mount points not supported");
  }
  */

  //
  // Alternate 3 : renames ONLY within the the same mount links.
  //
  if (resSrc.targetFileSystem !=resDst.targetFileSystem) {
    throw new IOException("Renames across Mount points not supported");
  }

  resSrc.targetFileSystem.renameInternal(resSrc.remainingPath,
    resDst.remainingPath, overwrite);
}

/** Two-argument form: delegates with overwrite=false. */
@Override
public void renameInternal(final Path src, final Path dst)
    throws AccessControlException, FileAlreadyExistsException,
    FileNotFoundException, ParentNotDirectoryException,
    UnresolvedLinkException, IOException {
  renameInternal(src, dst, false);
}
/** viewfs itself supports symlinks (mount links behave like links). */
@Override
public boolean supportsSymlinks() {
  return true;
}

/**
 * Creates a symlink at {@code link} via the resolved target; creating one
 * whose parent would live inside the mount table is rejected as read-only.
 */
@Override
public void createSymlink(final Path target, final Path link,
    final boolean createParent) throws IOException, UnresolvedLinkException {
  InodeTree.ResolveResult<AbstractFileSystem> res;
  try {
    res = fsState.resolve(getUriPath(link), false);
  } catch (FileNotFoundException e) {
    if (createParent) {
      throw readOnlyMountTable("createSymlink", link);
    } else {
      throw e;
    }
  }
  assert(res.remainingPath != null);
  res.targetFileSystem.createSymlink(target, res.remainingPath,
      createParent);
}

/** Returns the target of a symlink without following a final mount link. */
@Override
public Path getLinkTarget(final Path f) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), false); // do not follow mount link
  return res.targetFileSystem.getLinkTarget(res.remainingPath);
}
/** Sets owner/group on the resolved target path. */
@Override
public void setOwner(final Path f, final String username,
    final String groupname) throws AccessControlException,
    FileNotFoundException, UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  res.targetFileSystem.setOwner(res.remainingPath, username, groupname);
}

/** Sets permissions on the resolved target path. */
@Override
public void setPermission(final Path f, final FsPermission permission)
    throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  res.targetFileSystem.setPermission(res.remainingPath, permission);
}

/** Sets the replication factor on the resolved target path. */
@Override
public boolean setReplication(final Path f, final short replication)
    throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  return res.targetFileSystem.setReplication(res.remainingPath, replication);
}

/** Sets modification/access times on the resolved target path. */
@Override
public void setTimes(final Path f, final long mtime, final long atime)
    throws AccessControlException, FileNotFoundException,
    UnresolvedLinkException, IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
    fsState.resolve(getUriPath(f), true);
  res.targetFileSystem.setTimes(res.remainingPath, mtime, atime);
}

@Override
public void setVerifyChecksum(final boolean verifyChecksum)
    throws AccessControlException, IOException {
  // This is a file system level operations, however ViewFs
  // points to many file systems. Noop for ViewFs.
}
/** Returns a snapshot of the mount table as an array of MountPoints. */
public MountPoint[] getMountPoints() {
  List<InodeTree.MountPoint<AbstractFileSystem>> mountPoints =
      fsState.getMountPoints();
  MountPoint[] result = new MountPoint[mountPoints.size()];
  int i = 0;
  for (InodeTree.MountPoint<AbstractFileSystem> mp : mountPoints) {
    result[i++] = new MountPoint(new Path(mp.src), mp.target.targetDirLinkList);
  }
  return result;
}
/**
 * Collects delegation tokens from every mount target. Targets that return
 * null contribute nothing.
 */
@Override
public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
  List<InodeTree.MountPoint<AbstractFileSystem>> mountPoints =
      fsState.getMountPoints();
  // Presize the result: at most one token list entry per target link.
  int initialListSize = 0;
  for (InodeTree.MountPoint<AbstractFileSystem> im : mountPoints) {
    initialListSize += im.target.targetDirLinkList.length;
  }
  List<Token<?>> result = new ArrayList<Token<?>>(initialListSize);
  for (InodeTree.MountPoint<AbstractFileSystem> mp : mountPoints) {
    List<Token<?>> tokens =
        mp.target.targetFileSystem.getDelegationTokens(renewer);
    if (tokens != null) {
      result.addAll(tokens);
    }
  }
  return result;
}
@Override
public boolean isValidName(String src) {
  // Prefix validated at mount time and rest of path validated by mount target.
  return true;
}
/** Modifies ACL entries on the resolved target path. */
@Override
public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
    throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.modifyAclEntries(res.remainingPath, aclSpec);
}

/** Removes the given ACL entries on the resolved target path. */
@Override
public void removeAclEntries(Path path, List<AclEntry> aclSpec)
    throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.removeAclEntries(res.remainingPath, aclSpec);
}

/** Removes the default ACL on the resolved target path. */
@Override
public void removeDefaultAcl(Path path)
    throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.removeDefaultAcl(res.remainingPath);
}

/** Removes the entire ACL on the resolved target path. */
@Override
public void removeAcl(Path path)
    throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.removeAcl(res.remainingPath);
}

/** Replaces the ACL on the resolved target path. */
@Override
public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.setAcl(res.remainingPath, aclSpec);
}

/** Fetches the ACL status of the resolved target path. */
@Override
public AclStatus getAclStatus(Path path) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  return res.targetFileSystem.getAclStatus(res.remainingPath);
}

/** Sets an extended attribute on the resolved target path. */
@Override
public void setXAttr(Path path, String name, byte[] value,
    EnumSet<XAttrSetFlag> flag) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.setXAttr(res.remainingPath, name, value, flag);
}

/** Reads one extended attribute from the resolved target path. */
@Override
public byte[] getXAttr(Path path, String name) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  return res.targetFileSystem.getXAttr(res.remainingPath, name);
}

/** Reads all extended attributes of the resolved target path. */
@Override
public Map<String, byte[]> getXAttrs(Path path) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  return res.targetFileSystem.getXAttrs(res.remainingPath);
}

/** Reads the named extended attributes of the resolved target path. */
@Override
public Map<String, byte[]> getXAttrs(Path path, List<String> names)
    throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  return res.targetFileSystem.getXAttrs(res.remainingPath, names);
}

/** Lists the extended-attribute names on the resolved target path. */
@Override
public List<String> listXAttrs(Path path) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  return res.targetFileSystem.listXAttrs(res.remainingPath);
}

/** Removes an extended attribute from the resolved target path. */
@Override
public void removeXAttr(Path path, String name) throws IOException {
  InodeTree.ResolveResult<AbstractFileSystem> res =
      fsState.resolve(getUriPath(path), true);
  res.targetFileSystem.removeXAttr(res.remainingPath, name);
}
/*
 * An instance of this class represents an internal dir of the viewFs,
 * i.e. an internal dir of the mount table.
 * It is a read-only mount table and create, mkdir or delete operations
 * are not allowed.
 * If called on create or mkdir then this target is the parent of the
 * directory in which one is trying to create or mkdir; hence
 * in this case the path name passed in is the last component.
 * Otherwise this target is the end point of the path and hence
 * the path name passed in is null.
 */
static class InternalDirOfViewFs extends AbstractFileSystem {
final InodeTree.INodeDir<AbstractFileSystem> theInternalDir;
final long creationTime; // of the the mount table
final UserGroupInformation ugi; // the user/group of user who created mtable
final URI myUri; // the URI of the outer ViewFs
/**
 * @param dir the internal mount-table directory this instance serves
 * @param cTime creation time of the mount table (reported in FileStatus)
 * @param ugi user/group that created the mount table (reported as owner)
 * @param uri URI of the enclosing ViewFs
 */
public InternalDirOfViewFs(final InodeTree.INodeDir<AbstractFileSystem> dir,
    final long cTime, final UserGroupInformation ugi, final URI uri)
  throws URISyntaxException {
  super(FsConstants.VIEWFS_URI, FsConstants.VIEWFS_SCHEME, false, -1);
  theInternalDir = dir;
  creationTime = cTime;
  this.ugi = ugi;
  myUri = uri;
}
/**
 * Sanity check: operations on an internal dir should only ever be handed the
 * slash path. NOTE(review): this intentionally uses reference (!=) rather
 * than equals() — it appears InodeTree.SlashPath is expected to be the
 * single canonical instance; confirm against InodeTree before changing.
 */
static private void checkPathIsSlash(final Path f) throws IOException {
  if (f != InodeTree.SlashPath) {
    throw new IOException (
    "Internal implementation error: expected file name to be /" );
  }
}
/** Always fails: files cannot be created inside the read-only mount table. */
@Override
public FSDataOutputStream createInternal(final Path f,
    final EnumSet<CreateFlag> flag, final FsPermission absolutePermission,
    final int bufferSize, final short replication, final long blockSize,
    final Progressable progress, final ChecksumOpt checksumOpt,
    final boolean createParent) throws AccessControlException,
    FileAlreadyExistsException, FileNotFoundException,
    ParentNotDirectoryException, UnsupportedFileSystemException,
    UnresolvedLinkException, IOException {
  throw readOnlyMountTable("create", f);
}

/** Always fails: the mount table is read-only. */
@Override
public boolean delete(final Path f, final boolean recursive)
    throws AccessControlException, IOException {
  checkPathIsSlash(f);
  throw readOnlyMountTable("delete", f);
}
/** Internal dirs are directories, never files, so there are no blocks. */
@Override
public BlockLocation[] getFileBlockLocations(final Path f, final long start,
    final long len) throws FileNotFoundException, IOException {
  checkPathIsSlash(f);
  throw new FileNotFoundException("Path points to dir not a file");
}

/** Internal dirs are directories, never files, so there is no checksum. */
@Override
public FileChecksum getFileChecksum(final Path f)
    throws FileNotFoundException, IOException {
  checkPathIsSlash(f);
  throw new FileNotFoundException("Path points to dir not a file");
}

/**
 * Synthesizes a status for this internal dir: a 555 directory owned by the
 * mount table's creator, timestamped with the table's creation time.
 */
@Override
public FileStatus getFileStatus(final Path f) throws IOException {
  checkPathIsSlash(f);
  return new FileStatus(0, true, 0, 0, creationTime, creationTime,
      PERMISSION_555, ugi.getUserName(), ugi.getGroupNames()[0],
      new Path(theInternalDir.fullPath).makeQualified(
          myUri, null));
}
/**
 * Returns the status of a direct child of this internal dir without
 * following it: mount links report as symlinks, internal dirs as dirs.
 */
@Override
public FileStatus getFileLinkStatus(final Path f)
    throws FileNotFoundException {
  // look up i internalDirs children - ignore first Slash
  INode<AbstractFileSystem> inode =
    theInternalDir.children.get(f.toUri().toString().substring(1));
  if (inode == null) {
    throw new FileNotFoundException(
        "viewFs internal mount table - missing entry:" + f);
  }
  FileStatus result;
  if (inode instanceof INodeLink) {
    // Mount link: surface it as a symlink pointing at its target.
    INodeLink<AbstractFileSystem> inodelink =
      (INodeLink<AbstractFileSystem>) inode;
    result = new FileStatus(0, false, 0, 0, creationTime, creationTime,
      PERMISSION_555, ugi.getUserName(), ugi.getGroupNames()[0],
      inodelink.getTargetLink(),
      new Path(inode.fullPath).makeQualified(
          myUri, null));
  } else {
    // Nested internal dir: a plain synthesized directory status.
    result = new FileStatus(0, true, 0, 0, creationTime, creationTime,
      PERMISSION_555, ugi.getUserName(), ugi.getGroupNames()[0],
      new Path(inode.fullPath).makeQualified(
          myUri, null));
  }
  return result;
}
/** Internal dirs exist only in memory; no meaningful capacity to report. */
@Override
public FsStatus getFsStatus() {
  return new FsStatus(0, 0, 0);
}

/** Not applicable to the in-memory mount table. */
@Override
public FsServerDefaults getServerDefaults() throws IOException {
  throw new IOException("FsServerDefaults not implemented yet");
}

/** viewfs URIs have no port component. */
@Override
public int getUriDefaultPort() {
  return -1;
}
/**
 * Lists the children of this internal dir, synthesizing a FileStatus per
 * child: mount links appear as symlinks, nested internal dirs as dirs.
 */
@Override
public FileStatus[] listStatus(final Path f) throws AccessControlException,
    IOException {
  checkPathIsSlash(f);
  FileStatus[] result = new FileStatus[theInternalDir.children.size()];
  int i = 0;
  for (Entry<String, INode<AbstractFileSystem>> iEntry :
                                      theInternalDir.children.entrySet()) {
    INode<AbstractFileSystem> inode = iEntry.getValue();

    if (inode instanceof INodeLink ) {
      INodeLink<AbstractFileSystem> link =
        (INodeLink<AbstractFileSystem>) inode;

      result[i++] = new FileStatus(0, false, 0, 0,
        creationTime, creationTime,
        PERMISSION_555, ugi.getUserName(), ugi.getGroupNames()[0],
        link.getTargetLink(),
        new Path(inode.fullPath).makeQualified(
            myUri, null));
    } else {
      result[i++] = new FileStatus(0, true, 0, 0,
        creationTime, creationTime,
        PERMISSION_555, ugi.getUserName(), ugi.getGroupNames()[0],
        new Path(inode.fullPath).makeQualified(
            myUri, null));
    }
  }
  return result;
}
/**
 * Rejects mkdir: the mount table is read-only. As a special case, asking to
 * create the mount-table root reports that it already exists.
 */
@Override
public void mkdir(final Path dir, final FsPermission permission,
    final boolean createParent) throws AccessControlException,
    FileAlreadyExistsException {
  if (theInternalDir.isRoot && dir == null) {
    // Bug fix: message previously read "/ already exits" (typo).
    throw new FileAlreadyExistsException("/ already exists");
  }
  throw readOnlyMountTable("mkdir", dir);
}
/** Internal dirs are directories, never files, so open always fails. */
@Override
public FSDataInputStream open(final Path f, final int bufferSize)
    throws FileNotFoundException, IOException {
  checkPathIsSlash(f);
  throw new FileNotFoundException("Path points to dir not a file");
}

/** Always fails: entries of the mount table cannot be renamed. */
@Override
public void renameInternal(final Path src, final Path dst)
    throws AccessControlException, IOException {
  checkPathIsSlash(src);
  checkPathIsSlash(dst);
  throw readOnlyMountTable("rename", src);
}

/** Mount links behave like symlinks, so link queries are supported. */
@Override
public boolean supportsSymlinks() {
  return true;
}

/** Always fails: nothing can be created inside the mount table. */
@Override
public void createSymlink(final Path target, final Path link,
    final boolean createParent) throws AccessControlException {
  throw readOnlyMountTable("createSymlink", link);
}

/** Resolves a child mount link's target via its synthesized link status. */
@Override
public Path getLinkTarget(final Path f) throws FileNotFoundException,
    IOException {
  return getFileLinkStatus(f).getSymlink();
}
@Override
public void setOwner(final Path f, final String username,
final String groupname) throws AccessControlException, IOException {
checkPathIsSlash(f);
throw readOnlyMountTable("setOwner", f);
}
/** Permission changes are rejected: the internal dir is read-only. */
@Override
public void setPermission(final Path f, final FsPermission permission)
    throws AccessControlException, IOException {
  checkPathIsSlash(f);
  throw readOnlyMountTable("setPermission", f);
}
/** Replication changes are rejected: the internal dir is read-only. */
@Override
public boolean setReplication(final Path f, final short replication)
    throws AccessControlException, IOException {
  checkPathIsSlash(f);
  throw readOnlyMountTable("setReplication", f);
}
/** Timestamp changes are rejected: the internal dir is read-only. */
@Override
public void setTimes(final Path f, final long mtime, final long atime)
    throws AccessControlException, IOException {
  checkPathIsSlash(f);
  throw readOnlyMountTable("setTimes", f);
}
/**
 * Checksum-verification toggling is rejected; there is no path argument,
 * so the error is reported against the empty string.
 */
@Override
public void setVerifyChecksum(final boolean verifyChecksum)
    throws AccessControlException {
  throw readOnlyMountTable("setVerifyChecksum", "");
}
/** ACL modification is rejected: the internal dir is read-only. */
@Override
public void modifyAclEntries(Path path, List<AclEntry> aclSpec)
    throws IOException {
  checkPathIsSlash(path);
  throw readOnlyMountTable("modifyAclEntries", path);
}
/** ACL-entry removal is rejected: the internal dir is read-only. */
@Override
public void removeAclEntries(Path path, List<AclEntry> aclSpec)
    throws IOException {
  checkPathIsSlash(path);
  throw readOnlyMountTable("removeAclEntries", path);
}
/** Default-ACL removal is rejected: the internal dir is read-only. */
@Override
public void removeDefaultAcl(Path path) throws IOException {
  checkPathIsSlash(path);
  throw readOnlyMountTable("removeDefaultAcl", path);
}
/** ACL removal is rejected: the internal dir is read-only. */
@Override
public void removeAcl(Path path) throws IOException {
  checkPathIsSlash(path);
  throw readOnlyMountTable("removeAcl", path);
}
/** Setting an ACL is rejected: the internal dir is read-only. */
@Override
public void setAcl(Path path, List<AclEntry> aclSpec) throws IOException {
  checkPathIsSlash(path);
  throw readOnlyMountTable("setAcl", path);
}
/**
 * Returns a synthetic ACL for the internal dir: owned by the current user
 * and primary group, with the minimal ACL derived from the fixed r-xr-xr-x
 * (555) permission and no sticky bit.
 */
@Override
public AclStatus getAclStatus(Path path) throws IOException {
  checkPathIsSlash(path);
  return new AclStatus.Builder().owner(ugi.getUserName())
      .group(ugi.getGroupNames()[0])
      .addEntries(AclUtil.getMinimalAcl(PERMISSION_555))
      .stickyBit(false).build();
}
/** Setting an extended attribute is rejected: the internal dir is read-only. */
@Override
public void setXAttr(Path path, String name, byte[] value,
    EnumSet<XAttrSetFlag> flag) throws IOException {
  checkPathIsSlash(path);
  throw readOnlyMountTable("setXAttr", path);
}
/** The internal dir carries no xattrs; reads report a non-mount-point path. */
@Override
public byte[] getXAttr(Path path, String name) throws IOException {
  throw new NotInMountpointException(path, "getXAttr");
}
/** The internal dir carries no xattrs; reads report a non-mount-point path. */
@Override
public Map<String, byte[]> getXAttrs(Path path) throws IOException {
  throw new NotInMountpointException(path, "getXAttrs");
}
/** The internal dir carries no xattrs; reads report a non-mount-point path. */
@Override
public Map<String, byte[]> getXAttrs(Path path, List<String> names)
    throws IOException {
  throw new NotInMountpointException(path, "getXAttrs");
}
/** The internal dir carries no xattrs; listing reports a non-mount-point path. */
@Override
public List<String> listXAttrs(Path path) throws IOException {
  throw new NotInMountpointException(path, "listXAttrs");
}
/** Removing an extended attribute is rejected: the internal dir is read-only. */
@Override
public void removeXAttr(Path path, String name) throws IOException {
  checkPathIsSlash(path);
  throw readOnlyMountTable("removeXAttr", path);
}
}
}
|
apache/harmony | 36,932 | classlib/modules/swing/src/test/api/java.injected/javax/swing/tree/DefaultTreeModelTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Alexander T. Simbirtsev
*/
package javax.swing.tree;
import java.beans.PropertyChangeListener;
import java.util.Arrays;
import java.util.EventListener;
import javax.swing.SwingTestCase;
import javax.swing.event.TreeModelEvent;
import javax.swing.event.TreeModelListener;
import javax.swing.event.SwingPropertyChangeSupportTest.FindableListener;
public class DefaultTreeModelTest extends SwingTestCase {
protected DefaultTreeModel model = null;
protected DefaultMutableTreeNode root = null;
class ConcreteTreeModelListener extends FindableListener implements TreeModelListener {
public TreeModelEvent event = null;
public String type = null;
public boolean fired = false;
private final boolean debugOutput;
ConcreteTreeModelListener() {
super();
debugOutput = false;
}
ConcreteTreeModelListener(final boolean debugOutput) {
super();
this.debugOutput = debugOutput;
}
@Override
public void reset() {
event = null;
type = null;
fired = false;
}
public void treeNodesChanged(TreeModelEvent e) {
event = e;
fired = true;
type = "changed";
if (debugOutput) {
System.out.println("changed: " + e);
}
}
public void treeNodesInserted(TreeModelEvent e) {
event = e;
fired = true;
type = "inserted";
if (debugOutput) {
System.out.println("inserted: " + e);
}
}
public void treeNodesRemoved(TreeModelEvent e) {
event = e;
fired = true;
type = "removed";
if (debugOutput) {
System.out.println("removed: " + e);
}
}
public void treeStructureChanged(TreeModelEvent e) {
event = e;
fired = true;
type = "structure";
if (debugOutput) {
System.out.println("structure changed: " + e);
}
}
public void checkEvent(Object source, Object[] path, int[] childIndices,
Object[] children) {
assertNotNull("event", event);
assertEquals("source", source, event.getSource());
assertTrue("path", Arrays.equals(path, event.getPath()));
assertTrue("childIndices", Arrays.equals(childIndices, event.getChildIndices()));
assertTrue("children", Arrays.equals(children, event.getChildren()));
}
}
@Override
protected void setUp() throws Exception {
super.setUp();
root = new DefaultMutableTreeNode("root");
model = new DefaultTreeModel(root);
}
@Override
protected void tearDown() throws Exception {
super.tearDown();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.DefaultTreeModel(TreeNode)'
*/
public void testDefaultTreeModelTreeNode() {
TreeNode node = new DefaultMutableTreeNode(null);
model = new DefaultTreeModel(node);
assertEquals(node, model.root);
assertFalse(model.asksAllowsChildren);
assertNotNull(model.listenerList);
model = new DefaultTreeModel(null);
assertNull(model.root);
assertFalse(model.asksAllowsChildren);
assertNotNull(model.listenerList);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.DefaultTreeModel(TreeNode, boolean)'
*/
public void testDefaultTreeModelTreeNodeBoolean() {
TreeNode node = new DefaultMutableTreeNode(null);
model = new DefaultTreeModel(node, true);
assertEquals(node, model.root);
assertTrue(model.asksAllowsChildren);
assertNotNull(model.listenerList);
model = new DefaultTreeModel(null, false);
assertNull(model.root);
assertFalse(model.asksAllowsChildren);
assertNotNull(model.listenerList);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.setAsksAllowsChildren(boolean)'
*/
public void testSetAsksAllowsChildren() {
TreeNode node = new DefaultMutableTreeNode(null);
model = new DefaultTreeModel(node);
model.setAsksAllowsChildren(true);
assertTrue(model.asksAllowsChildren);
model.setAsksAllowsChildren(false);
assertFalse(model.asksAllowsChildren);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.asksAllowsChildren()'
*/
public void testSetGetAsksAllowsChildren() {
TreeNode node = new DefaultMutableTreeNode(null);
model = new DefaultTreeModel(node);
model.asksAllowsChildren = true;
assertTrue(model.asksAllowsChildren());
model.asksAllowsChildren = false;
assertFalse(model.asksAllowsChildren());
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.addTreeModelListener(TreeModelListener)'
*/
public void testAddTreeModelListener() {
ConcreteTreeModelListener listener1 = new ConcreteTreeModelListener();
ConcreteTreeModelListener listener2 = new ConcreteTreeModelListener();
TreeModelListener[] listenersArray = null;
model.addTreeModelListener(listener1);
listenersArray = model.getTreeModelListeners();
assertTrue(listenersArray.length == 1);
assertEquals(1, model.listenerList.getListeners(TreeModelListener.class).length);
assertEquals(1, model.getListeners(TreeModelListener.class).length);
assertTrue(listener1.findMe(listenersArray) > 0);
model.addTreeModelListener(listener2);
listenersArray = model.getTreeModelListeners();
assertEquals(2, listenersArray.length);
assertTrue(listener1.findMe(listenersArray) > 0);
assertTrue(listener2.findMe(listenersArray) > 0);
model.addTreeModelListener(listener2);
listenersArray = model.getTreeModelListeners();
assertEquals(3, listenersArray.length);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.removeTreeModelListener(TreeModelListener)'
*/
public void testRemoveTreeModelListener() {
ConcreteTreeModelListener changeListener1 = new ConcreteTreeModelListener();
ConcreteTreeModelListener changeListener2 = new ConcreteTreeModelListener();
ConcreteTreeModelListener changeListener3 = new ConcreteTreeModelListener();
TreeModelListener[] listenersArray = null;
model.addTreeModelListener(changeListener1);
model.addTreeModelListener(changeListener2);
model.addTreeModelListener(changeListener3);
listenersArray = model.getTreeModelListeners();
assertEquals(3, listenersArray.length);
assertEquals(1, changeListener1.findMe(listenersArray));
assertEquals(1, changeListener2.findMe(listenersArray));
assertEquals(1, changeListener3.findMe(listenersArray));
model.removeTreeModelListener(changeListener2);
listenersArray = model.getTreeModelListeners();
assertEquals(2, listenersArray.length);
assertEquals(1, changeListener1.findMe(listenersArray));
assertEquals(0, changeListener2.findMe(listenersArray));
assertEquals(1, changeListener3.findMe(listenersArray));
model.removeTreeModelListener(changeListener1);
listenersArray = model.getTreeModelListeners();
assertEquals(1, listenersArray.length);
assertEquals(1, changeListener3.findMe(listenersArray));
model.removeTreeModelListener(changeListener3);
listenersArray = model.getTreeModelListeners();
assertEquals(0, listenersArray.length);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.getTreeModelListeners()'
*/
public void testGetTreeModelListeners() {
ConcreteTreeModelListener changeListener1 = new ConcreteTreeModelListener();
ConcreteTreeModelListener changeListener2 = new ConcreteTreeModelListener();
ConcreteTreeModelListener changeListener3 = new ConcreteTreeModelListener();
TreeModelListener[] listenersArray = null;
listenersArray = model.getTreeModelListeners();
assertTrue(listenersArray != null && listenersArray.length == 0);
model.addTreeModelListener(changeListener1);
model.addTreeModelListener(changeListener2);
model.addTreeModelListener(changeListener3);
model.addTreeModelListener(changeListener2);
listenersArray = model.getTreeModelListeners();
assertTrue(listenersArray.length == 4);
assertTrue(changeListener1.findMe(listenersArray) == 1);
assertTrue(changeListener2.findMe(listenersArray) == 2);
assertTrue(changeListener3.findMe(listenersArray) == 1);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.getListeners(EventListener)'
*/
public void testGetListeners() {
TreeModelListener listener1 = new ConcreteTreeModelListener();
TreeModelListener listener2 = new ConcreteTreeModelListener();
EventListener[] listenersArray = null;
listenersArray = model.getListeners(TreeModelListener.class);
assertEquals(0, listenersArray.length);
model.addTreeModelListener(listener1);
model.addTreeModelListener(listener2);
listenersArray = model.getListeners(PropertyChangeListener.class);
assertEquals(0, listenersArray.length);
listenersArray = model.getListeners(TreeModelListener.class);
assertEquals(2, listenersArray.length);
model.removeTreeModelListener(listener1);
listenersArray = model.getListeners(TreeModelListener.class);
assertEquals(1, listenersArray.length);
model.addTreeModelListener(listener2);
listenersArray = model.getListeners(TreeModelListener.class);
assertEquals(2, listenersArray.length);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.getChild(Object, int)'
*/
public void testGetChild() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
assertEquals(node2, model.getChild(node1, 0));
assertEquals(node5, model.getChild(node1, 1));
assertEquals(node3, model.getChild(node2, 0));
assertEquals(node4, model.getChild(node2, 1));
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.getChildCount(Object)'
*/
public void testGetChildCount() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
assertEquals(2, model.getChildCount(node1));
assertEquals(2, model.getChildCount(node2));
assertEquals(0, model.getChildCount(node3));
assertEquals(0, model.getChildCount(node4));
assertEquals(0, model.getChildCount(node5));
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.getIndexOfChild(Object, Object)'
*/
public void testGetIndexOfChild() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode(null);
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode(null);
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode(null);
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode(null);
node1.add(node2);
node1.add(node3);
node3.add(node4);
assertEquals(-1, model.getIndexOfChild(null, node1));
assertEquals(-1, model.getIndexOfChild(node1, null));
assertEquals(0, model.getIndexOfChild(node1, node2));
assertEquals(1, model.getIndexOfChild(node1, node3));
assertEquals(-1, model.getIndexOfChild(node1, node4));
assertEquals(0, model.getIndexOfChild(node3, node4));
assertEquals(-1, model.getIndexOfChild(node4, node3));
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.setRoot(TreeNode)'
*/
public void testSetRoot() {
TreeNode root1 = new DefaultMutableTreeNode(null);
TreeNode root2 = new DefaultMutableTreeNode(null);
TreeNode root3 = null;
ConcreteTreeModelListener listener = new ConcreteTreeModelListener(false);
model.addTreeModelListener(listener);
model.setRoot(root1);
assertEquals(root1, model.root);
assertNotNull(listener.event);
assertEquals("structure", listener.type);
listener.checkEvent(model, new Object[] { root1 }, null, null);
listener.reset();
model.setRoot(root2);
assertEquals(root2, model.root);
assertNotNull(listener.event);
assertEquals("structure", listener.type);
listener.checkEvent(model, new Object[] { root2 }, null, null);
listener.reset();
model.setRoot(root2);
assertEquals(root2, model.root);
assertNotNull(listener.event);
assertEquals("structure", listener.type);
listener.checkEvent(model, new Object[] { root2 }, null, null);
listener.reset();
model.setRoot(root3);
assertEquals(root3, model.root);
assertNotNull(listener.event);
assertEquals("structure", listener.type);
listener.checkEvent(model, null, new int[0], null);
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.getRoot()'
*/
public void testGetRoot() {
TreeNode root1 = new DefaultMutableTreeNode(null);
TreeNode root2 = new DefaultMutableTreeNode(null);
TreeNode root3 = null;
model.root = root1;
assertEquals(root1, model.getRoot());
model.root = root2;
assertEquals(root2, model.getRoot());
model.root = root3;
assertEquals(root3, model.getRoot());
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.isLeaf(Object)'
*/
public void testIsLeaf() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode(null, true);
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode(null, false);
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode(null);
model.setAsksAllowsChildren(true);
assertFalse(model.isLeaf(node1));
assertTrue(model.isLeaf(node2));
model.setAsksAllowsChildren(false);
assertTrue(model.isLeaf(node1));
assertTrue(model.isLeaf(node2));
node1.add(node3);
model.setAsksAllowsChildren(true);
assertFalse(model.isLeaf(node1));
model.setAsksAllowsChildren(false);
assertFalse(model.isLeaf(node1));
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.valueForPathChanged(TreePath, Object)'
*/
public void testValueForPathChanged() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
model.valueForPathChanged(new TreePath(node1), "11");
assertEquals("11", node1.getUserObject());
model.valueForPathChanged(new TreePath(new Object[] { node1, node2 }), "22");
assertEquals("22", node2.getUserObject());
assertEquals("11", node1.getUserObject());
model.valueForPathChanged(new TreePath(new Object[] { node2 }), "222");
assertEquals("222", node2.getUserObject());
assertEquals("11", node1.getUserObject());
model.valueForPathChanged(new TreePath(new Object[] { node2, node4 }), "222444");
assertEquals("222", node2.getUserObject());
assertEquals("222444", node4.getUserObject());
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.reload()'
*/
public void testReload() {
DefaultMutableTreeNode node1 = root;
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
model.setRoot(null);
model.reload();
assertNotNull(listener.event);
assertEquals("structure", listener.type);
listener.checkEvent(model, null, new int[0], null);
listener.reset();
model.setRoot(root);
model.reload();
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { root }, null, null);
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.reload(TreeNode)'
*/
public void testReloadTreeNode() {
DefaultMutableTreeNode node1 = root;
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
model.reload(null);
assertNull(listener.event);
model.setRoot(null);
model.reload(null);
assertNotNull(listener.event);
assertEquals("structure", listener.type);
listener.checkEvent(model, null, new int[0], null);
listener.reset();
model.setRoot(root);
model.reload(root);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { root }, null, null);
listener.reset();
model.reload(node2);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { root, node2 }, null, null);
listener.reset();
model.reload(node4);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { root, node2, node4 }, null, null);
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.insertNodeInto(MutableTreeNode, MutableTreeNode, int)'
*/
public void testInsertNodeInto() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
try {
model.insertNodeInto(null, node1, 0);
fail("no exception's been thrown");
} catch (IllegalArgumentException e) {
}
model.insertNodeInto(node1, node2, 0);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { node2 }, new int[] { 0 },
new Object[] { node1 });
assertEquals(node1, node2.getChildAt(0));
listener.reset();
model.insertNodeInto(node3, node2, 0);
listener.checkEvent(model, new Object[] { node2 }, new int[] { 0 },
new Object[] { node3 });
assertEquals(node3, node2.getChildAt(0));
assertEquals(node1, node2.getChildAt(1));
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.removeNodeFromParent(MutableTreeNode)'
*/
public void testRemoveNodeFromParent() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
node1.add(node2);
node1.add(node3);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
try {
model.removeNodeFromParent(node1);
fail("no exception's been thrown");
} catch (IllegalArgumentException e) {
}
model.removeNodeFromParent(node3);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { node1 }, new int[] { 1 },
new Object[] { node3 });
assertEquals(1, node1.getChildCount());
listener.reset();
model.removeNodeFromParent(node2);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { node1 }, new int[] { 0 },
new Object[] { node2 });
assertEquals(0, node1.getChildCount());
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.nodeChanged(TreeNode)'
*/
public void testNodeChanged() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("4");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
model.nodeChanged(null);
assertNull(listener.event);
model.nodeChanged(node1);
assertNull(listener.event);
model.nodeChanged(root);
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { root }, null, null);
listener.reset();
model.nodeChanged(node2);
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { node1 }, new int[] { 0 },
new Object[] { node2 });
listener.reset();
model.nodeChanged(node3);
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { node1, node2 }, new int[] { 0 },
new Object[] { node3 });
listener.reset();
model.nodeChanged(node4);
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { node1, node2 }, new int[] { 1 },
new Object[] { node4 });
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.nodesWereInserted(TreeNode, int[])'
*/
public void testNodesWereInserted() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
model.nodesWereInserted(null, null);
assertNull(listener.event);
model.nodesWereInserted(node1, null);
assertNull(listener.event);
model.nodesWereInserted(node1, new int[0]);
assertNull(listener.event);
model.nodesWereInserted(node1, new int[] { 1, 0 });
assertNotNull(listener.event);
assertEquals("inserted", listener.type);
listener.checkEvent(model, new Object[] { node1 }, new int[] { 1, 0 }, new Object[] {
node5, node2 });
listener.reset();
model.nodesWereInserted(node2, new int[] { 0, 1 });
assertNotNull(listener.event);
assertEquals("inserted", listener.type);
listener.checkEvent(model, new Object[] { node1, node2 }, new int[] { 0, 1 },
new Object[] { node3, node4 });
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.nodesWereRemoved(TreeNode, int[], Object[])'
*/
public void testNodesWereRemoved() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
model.nodesWereRemoved(null, null, null);
assertNull(listener.event);
model.nodesWereRemoved(node1, null, null);
assertNull(listener.event);
model.nodesWereRemoved(node1, new int[] { 1, 0 }, new Object[] { node3, node4 });
assertNotNull(listener.event);
assertEquals("removed", listener.type);
listener.checkEvent(model, new Object[] { node1 }, new int[] { 1, 0 }, new Object[] {
node3, node4 });
listener.reset();
model.nodesWereRemoved(node4, new int[] { 1 }, new Object[] { node3, node5 });
assertNotNull(listener.event);
assertEquals("removed", listener.type);
listener.checkEvent(model, new Object[] { node1, node2, node4 }, new int[] { 1 },
new Object[] { node3, node5 });
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.nodesChanged(TreeNode, int[])'
*/
public void testNodesChanged() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
DefaultMutableTreeNode node6 = new DefaultMutableTreeNode("6");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
root.add(node6);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
model.nodesChanged(null, null);
assertNull(listener.event);
model.nodesChanged(node1, null);
assertNull(listener.event);
model.nodesChanged(root, null);
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { root }, null, null);
listener.reset();
model.nodesChanged(root, new int[] { 0 });
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { root }, new int[] { 0 },
new Object[] { node6 });
listener.reset();
model.nodesChanged(node1, new int[0]);
assertNull(listener.event);
model.nodesChanged(null, new int[] { 1 });
assertNull(listener.event);
model.nodesChanged(node2, new int[] { 1 });
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { node1, node2 }, new int[] { 1 },
new Object[] { node4 });
listener.reset();
model.nodesChanged(node1, new int[] { 0 });
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { node1 }, new int[] { 0 },
new Object[] { node2 });
listener.reset();
model.nodesChanged(node1, new int[] { 0, 1 });
assertNotNull(listener.event);
assertEquals("changed", listener.type);
listener.checkEvent(model, new Object[] { node1 }, new int[] { 0, 1 }, new Object[] {
node2, node5 });
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.nodeStructureChanged(TreeNode)'
*/
public void testNodeStructureChanged() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = new DefaultMutableTreeNode("3");
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
DefaultMutableTreeNode node5 = new DefaultMutableTreeNode("5");
node1.add(node2);
node1.add(node5);
node2.add(node3);
node2.add(node4);
ConcreteTreeModelListener listener = new ConcreteTreeModelListener();
model.addTreeModelListener(listener);
model.nodeStructureChanged(null);
assertNull(listener.event);
model.setRoot(null);
listener.reset();
model.nodeStructureChanged(null);
assertNull(listener.event);
model.nodeStructureChanged(node1);
assertNotNull(listener.event);
assertEquals("structure", listener.type);
listener.checkEvent(model, new Object[] { node1 }, null, null);
listener.reset();
model.nodeStructureChanged(node2);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { node1, node2 }, null, null);
listener.reset();
model.nodeStructureChanged(node4);
assertNotNull(listener.event);
listener.checkEvent(model, new Object[] { node1, node2, node4 }, null, null);
listener.reset();
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.getPathToRoot(TreeNode)'
*/
public void testGetPathToRootTreeNode() {
DefaultMutableTreeNode node1 = new DefaultMutableTreeNode("1");
DefaultMutableTreeNode node2 = new DefaultMutableTreeNode("2");
DefaultMutableTreeNode node3 = root;
DefaultMutableTreeNode node4 = new DefaultMutableTreeNode("4");
node1.add(node2);
node2.add(node3);
node3.add(node4);
if (isHarmony()) {
assertEquals(0, model.getPathToRoot(null).length);
} else {
assertNull(model.getPathToRoot(null));
}
assertTrue(Arrays.equals(new TreeNode[] { node1 }, model.getPathToRoot(node1)));
assertTrue(Arrays.equals(new TreeNode[] { node1, node2 }, model.getPathToRoot(node2)));
assertTrue(Arrays.equals(new TreeNode[] { node3 }, model.getPathToRoot(node3)));
assertTrue(Arrays.equals(new TreeNode[] { node3, node4 }, model.getPathToRoot(node4)));
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.fireTreeNodesChanged(Object, Object[], int[], Object[])'
*/
public void testFireTreeNodesChanged() {
Object source1 = "source1";
Object[] paths1 = new Object[] { "1", "2" };
int[] indices1 = new int[] { 100, 200 };
Object[] children1 = new Object[] { "10", "20" };
ConcreteTreeModelListener listener1 = new ConcreteTreeModelListener();
ConcreteTreeModelListener listener2 = new ConcreteTreeModelListener();
model.addTreeModelListener(listener1);
model.addTreeModelListener(listener2);
model.fireTreeNodesChanged(source1, paths1, indices1, children1);
assertNotNull(listener1.event);
assertNotNull(listener2.event);
assertEquals("changed", listener1.type);
assertEquals("changed", listener2.type);
listener1.checkEvent(source1, paths1, indices1, children1);
listener2.checkEvent(source1, paths1, indices1, children1);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.fireTreeNodesInserted(Object, Object[], int[], Object[])'
*/
public void testFireTreeNodesInserted() {
Object source1 = "source1";
Object[] paths1 = new Object[] { "1", "2" };
int[] indices1 = new int[] { 100, 200 };
Object[] children1 = new Object[] { "10", "20" };
ConcreteTreeModelListener listener1 = new ConcreteTreeModelListener();
ConcreteTreeModelListener listener2 = new ConcreteTreeModelListener();
model.addTreeModelListener(listener1);
model.addTreeModelListener(listener2);
model.fireTreeNodesInserted(source1, paths1, indices1, children1);
assertNotNull(listener1.event);
assertNotNull(listener2.event);
assertEquals("inserted", listener1.type);
assertEquals("inserted", listener2.type);
listener1.checkEvent(source1, paths1, indices1, children1);
listener2.checkEvent(source1, paths1, indices1, children1);
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.fireTreeNodesRemoved(Object, Object[], int[], Object[])'
*/
public void testFireTreeNodesRemoved() {
    final Object eventSource = "source1";
    final Object[] eventPath = new Object[] { "1", "2" };
    final int[] childIndices = new int[] { 100, 200 };
    final Object[] removedChildren = new Object[] { "10", "20" };
    ConcreteTreeModelListener firstListener = new ConcreteTreeModelListener();
    ConcreteTreeModelListener secondListener = new ConcreteTreeModelListener();
    model.addTreeModelListener(firstListener);
    model.addTreeModelListener(secondListener);
    model.fireTreeNodesRemoved(eventSource, eventPath, childIndices, removedChildren);
    // Every registered listener must receive the identical "removed" notification.
    for (ConcreteTreeModelListener listener
            : new ConcreteTreeModelListener[] { firstListener, secondListener }) {
        assertNotNull(listener.event);
        assertEquals("removed", listener.type);
        listener.checkEvent(eventSource, eventPath, childIndices, removedChildren);
    }
}
/*
* Test method for 'javax.swing.tree.DefaultTreeModel.fireTreeStructureChanged(Object, Object[], int[], Object[])'
*/
public void testFireTreeStructureChanged() {
    final Object eventSource = "source1";
    final Object[] eventPath = new Object[] { "1", "2" };
    final int[] childIndices = new int[] { 100, 200 };
    final Object[] affectedChildren = new Object[] { "10", "20" };
    ConcreteTreeModelListener firstListener = new ConcreteTreeModelListener();
    ConcreteTreeModelListener secondListener = new ConcreteTreeModelListener();
    model.addTreeModelListener(firstListener);
    model.addTreeModelListener(secondListener);
    model.fireTreeStructureChanged(eventSource, eventPath, childIndices, affectedChildren);
    // Every registered listener must receive the identical "structure" notification.
    for (ConcreteTreeModelListener listener
            : new ConcreteTreeModelListener[] { firstListener, secondListener }) {
        assertNotNull(listener.event);
        assertEquals("structure", listener.type);
        listener.checkEvent(eventSource, eventPath, childIndices, affectedChildren);
    }
}
}
|
googleapis/google-cloud-java | 37,048 | java-shopping-merchant-accounts/grpc-google-shopping-merchant-accounts-v1beta/src/main/java/com/google/shopping/merchant/accounts/v1beta/HomepageServiceGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.shopping.merchant.accounts.v1beta;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service to support an API for a store's homepage.
* </pre>
*/
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/shopping/merchant/accounts/v1beta/homepage.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class HomepageServiceGrpc {

  // Generated holder class: hosts only static stubs, descriptors, and dispatch
  // plumbing for the HomepageService proto service. Never instantiated.
  private HomepageServiceGrpc() {}

  public static final java.lang.String SERVICE_NAME =
      "google.shopping.merchant.accounts.v1beta.HomepageService";

  // Static method descriptors that strictly reflect the proto.
  // Each descriptor field is volatile and initialized lazily via double-checked
  // locking on HomepageServiceGrpc.class, so construction happens at most once.
  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getGetHomepageMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetHomepage",
      requestType = com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1beta.Homepage.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getGetHomepageMethod() {
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest,
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        getGetHomepageMethod;
    if ((getGetHomepageMethod = HomepageServiceGrpc.getGetHomepageMethod) == null) {
      synchronized (HomepageServiceGrpc.class) {
        if ((getGetHomepageMethod = HomepageServiceGrpc.getGetHomepageMethod) == null) {
          HomepageServiceGrpc.getGetHomepageMethod =
              getGetHomepageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest,
                          com.google.shopping.merchant.accounts.v1beta.Homepage>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetHomepage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.Homepage
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new HomepageServiceMethodDescriptorSupplier("GetHomepage"))
                      .build();
        }
      }
    }
    return getGetHomepageMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getUpdateHomepageMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdateHomepage",
      requestType = com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1beta.Homepage.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getUpdateHomepageMethod() {
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest,
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        getUpdateHomepageMethod;
    if ((getUpdateHomepageMethod = HomepageServiceGrpc.getUpdateHomepageMethod) == null) {
      synchronized (HomepageServiceGrpc.class) {
        if ((getUpdateHomepageMethod = HomepageServiceGrpc.getUpdateHomepageMethod) == null) {
          HomepageServiceGrpc.getUpdateHomepageMethod =
              getUpdateHomepageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest,
                          com.google.shopping.merchant.accounts.v1beta.Homepage>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateHomepage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.Homepage
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new HomepageServiceMethodDescriptorSupplier("UpdateHomepage"))
                      .build();
        }
      }
    }
    return getUpdateHomepageMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getClaimHomepageMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ClaimHomepage",
      requestType = com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1beta.Homepage.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getClaimHomepageMethod() {
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest,
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        getClaimHomepageMethod;
    if ((getClaimHomepageMethod = HomepageServiceGrpc.getClaimHomepageMethod) == null) {
      synchronized (HomepageServiceGrpc.class) {
        if ((getClaimHomepageMethod = HomepageServiceGrpc.getClaimHomepageMethod) == null) {
          HomepageServiceGrpc.getClaimHomepageMethod =
              getClaimHomepageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest,
                          com.google.shopping.merchant.accounts.v1beta.Homepage>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ClaimHomepage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.Homepage
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new HomepageServiceMethodDescriptorSupplier("ClaimHomepage"))
                      .build();
        }
      }
    }
    return getClaimHomepageMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getUnclaimHomepageMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UnclaimHomepage",
      requestType = com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1beta.Homepage.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest,
          com.google.shopping.merchant.accounts.v1beta.Homepage>
      getUnclaimHomepageMethod() {
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest,
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        getUnclaimHomepageMethod;
    if ((getUnclaimHomepageMethod = HomepageServiceGrpc.getUnclaimHomepageMethod) == null) {
      synchronized (HomepageServiceGrpc.class) {
        if ((getUnclaimHomepageMethod = HomepageServiceGrpc.getUnclaimHomepageMethod) == null) {
          HomepageServiceGrpc.getUnclaimHomepageMethod =
              getUnclaimHomepageMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest,
                          com.google.shopping.merchant.accounts.v1beta.Homepage>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UnclaimHomepage"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1beta.Homepage
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new HomepageServiceMethodDescriptorSupplier("UnclaimHomepage"))
                      .build();
        }
      }
    }
    return getUnclaimHomepageMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static HomepageServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<HomepageServiceStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceStub>() {
          @java.lang.Override
          public HomepageServiceStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new HomepageServiceStub(channel, callOptions);
          }
        };
    return HomepageServiceStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static HomepageServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingV2Stub>() {
          @java.lang.Override
          public HomepageServiceBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new HomepageServiceBlockingV2Stub(channel, callOptions);
          }
        };
    return HomepageServiceBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static HomepageServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceBlockingStub>() {
          @java.lang.Override
          public HomepageServiceBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new HomepageServiceBlockingStub(channel, callOptions);
          }
        };
    return HomepageServiceBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static HomepageServiceFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<HomepageServiceFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<HomepageServiceFutureStub>() {
          @java.lang.Override
          public HomepageServiceFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new HomepageServiceFutureStub(channel, callOptions);
          }
        };
    return HomepageServiceFutureStub.newStub(factory, channel);
  }

  /**
   * Server-side contract for HomepageService. Each default method replies with
   * UNIMPLEMENTED until overridden by a concrete implementation.
   *
   * <pre>
   * Service to support an API for a store's homepage.
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Retrieves a store's homepage.
     * </pre>
     */
    default void getHomepage(
        com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getGetHomepageMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    default void updateHomepage(
        com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateHomepageMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Claims a store's homepage. Executing this method requires admin access.
     * If the homepage is already claimed, this will recheck the
     * verification (unless the merchant is exempted from claiming, which also
     * exempts from verification) and return a successful response. If ownership
     * can no longer be verified, it will return an error, but it won't clear the
     * claim. In case of failure, a canonical error message will be returned:
     * * PERMISSION_DENIED: user doesn't have the necessary permissions on this
     * MC account;
     * * FAILED_PRECONDITION:
     *   - The account is not a Merchant Center account;
     *   - MC account doesn't have a homepage;
     *   - claiming failed (in this case the error message will contain more
     *   details).
     * </pre>
     */
    default void claimHomepage(
        com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getClaimHomepageMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Unclaims a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    default void unclaimHomepage(
        com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUnclaimHomepageMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service HomepageService.
   *
   * <pre>
   * Service to support an API for a store's homepage.
   * </pre>
   */
  public abstract static class HomepageServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return HomepageServiceGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service HomepageService.
   *
   * <pre>
   * Service to support an API for a store's homepage.
   * </pre>
   */
  public static final class HomepageServiceStub
      extends io.grpc.stub.AbstractAsyncStub<HomepageServiceStub> {
    private HomepageServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected HomepageServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new HomepageServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves a store's homepage.
     * </pre>
     */
    public void getHomepage(
        com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetHomepageMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public void updateHomepage(
        com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateHomepageMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Claims a store's homepage. Executing this method requires admin access.
     * If the homepage is already claimed, this will recheck the
     * verification (unless the merchant is exempted from claiming, which also
     * exempts from verification) and return a successful response. If ownership
     * can no longer be verified, it will return an error, but it won't clear the
     * claim. In case of failure, a canonical error message will be returned:
     * * PERMISSION_DENIED: user doesn't have the necessary permissions on this
     * MC account;
     * * FAILED_PRECONDITION:
     *   - The account is not a Merchant Center account;
     *   - MC account doesn't have a homepage;
     *   - claiming failed (in this case the error message will contain more
     *   details).
     * </pre>
     */
    public void claimHomepage(
        com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getClaimHomepageMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Unclaims a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public void unclaimHomepage(
        com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUnclaimHomepageMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service HomepageService.
   *
   * <pre>
   * Service to support an API for a store's homepage.
   * </pre>
   */
  public static final class HomepageServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<HomepageServiceBlockingV2Stub> {
    private HomepageServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected HomepageServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new HomepageServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves a store's homepage.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage getHomepage(
        com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetHomepageMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage updateHomepage(
        com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateHomepageMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Claims a store's homepage. Executing this method requires admin access.
     * If the homepage is already claimed, this will recheck the
     * verification (unless the merchant is exempted from claiming, which also
     * exempts from verification) and return a successful response. If ownership
     * can no longer be verified, it will return an error, but it won't clear the
     * claim. In case of failure, a canonical error message will be returned:
     * * PERMISSION_DENIED: user doesn't have the necessary permissions on this
     * MC account;
     * * FAILED_PRECONDITION:
     *   - The account is not a Merchant Center account;
     *   - MC account doesn't have a homepage;
     *   - claiming failed (in this case the error message will contain more
     *   details).
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage claimHomepage(
        com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getClaimHomepageMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Unclaims a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage unclaimHomepage(
        com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUnclaimHomepageMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service HomepageService.
   *
   * <pre>
   * Service to support an API for a store's homepage.
   * </pre>
   */
  public static final class HomepageServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<HomepageServiceBlockingStub> {
    private HomepageServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected HomepageServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new HomepageServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves a store's homepage.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage getHomepage(
        com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetHomepageMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage updateHomepage(
        com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateHomepageMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Claims a store's homepage. Executing this method requires admin access.
     * If the homepage is already claimed, this will recheck the
     * verification (unless the merchant is exempted from claiming, which also
     * exempts from verification) and return a successful response. If ownership
     * can no longer be verified, it will return an error, but it won't clear the
     * claim. In case of failure, a canonical error message will be returned:
     * * PERMISSION_DENIED: user doesn't have the necessary permissions on this
     * MC account;
     * * FAILED_PRECONDITION:
     *   - The account is not a Merchant Center account;
     *   - MC account doesn't have a homepage;
     *   - claiming failed (in this case the error message will contain more
     *   details).
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage claimHomepage(
        com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getClaimHomepageMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Unclaims a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1beta.Homepage unclaimHomepage(
        com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUnclaimHomepageMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service HomepageService.
   *
   * <pre>
   * Service to support an API for a store's homepage.
   * </pre>
   */
  public static final class HomepageServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<HomepageServiceFutureStub> {
    private HomepageServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected HomepageServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new HomepageServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves a store's homepage.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        getHomepage(com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetHomepageMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        updateHomepage(com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateHomepageMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Claims a store's homepage. Executing this method requires admin access.
     * If the homepage is already claimed, this will recheck the
     * verification (unless the merchant is exempted from claiming, which also
     * exempts from verification) and return a successful response. If ownership
     * can no longer be verified, it will return an error, but it won't clear the
     * claim. In case of failure, a canonical error message will be returned:
     * * PERMISSION_DENIED: user doesn't have the necessary permissions on this
     * MC account;
     * * FAILED_PRECONDITION:
     *   - The account is not a Merchant Center account;
     *   - MC account doesn't have a homepage;
     *   - claiming failed (in this case the error message will contain more
     *   details).
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        claimHomepage(com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getClaimHomepageMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Unclaims a store's homepage. Executing this method requires admin access.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1beta.Homepage>
        unclaimHomepage(
            com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUnclaimHomepageMethod(), getCallOptions()), request);
    }
  }

  // Dispatch identifiers used by MethodHandlers.invoke(...) to route each unary call
  // to the matching AsyncService method.
  private static final int METHODID_GET_HOMEPAGE = 0;
  private static final int METHODID_UPDATE_HOMEPAGE = 1;
  private static final int METHODID_CLAIM_HOMEPAGE = 2;
  private static final int METHODID_UNCLAIM_HOMEPAGE = 3;

  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    // Unary/server-streaming entry point: dispatches on methodId to the service impl.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_HOMEPAGE:
          serviceImpl.getHomepage(
              (com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>)
                  responseObserver);
          break;
        case METHODID_UPDATE_HOMEPAGE:
          serviceImpl.updateHomepage(
              (com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>)
                  responseObserver);
          break;
        case METHODID_CLAIM_HOMEPAGE:
          serviceImpl.claimHomepage(
              (com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>)
                  responseObserver);
          break;
        case METHODID_UNCLAIM_HOMEPAGE:
          serviceImpl.unclaimHomepage(
              (com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1beta.Homepage>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    // Client/bidi-streaming entry point: all four RPCs here are UNARY, so no
    // methodId is ever routed through this overload.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  // Assembles the server-side service definition, wiring each unary method
  // descriptor to a MethodHandlers instance over the given service implementation.
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getGetHomepageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.GetHomepageRequest,
                    com.google.shopping.merchant.accounts.v1beta.Homepage>(
                    service, METHODID_GET_HOMEPAGE)))
        .addMethod(
            getUpdateHomepageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.UpdateHomepageRequest,
                    com.google.shopping.merchant.accounts.v1beta.Homepage>(
                    service, METHODID_UPDATE_HOMEPAGE)))
        .addMethod(
            getClaimHomepageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.ClaimHomepageRequest,
                    com.google.shopping.merchant.accounts.v1beta.Homepage>(
                    service, METHODID_CLAIM_HOMEPAGE)))
        .addMethod(
            getUnclaimHomepageMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1beta.UnclaimHomepageRequest,
                    com.google.shopping.merchant.accounts.v1beta.Homepage>(
                    service, METHODID_UNCLAIM_HOMEPAGE)))
        .build();
  }

  // Suppliers below expose the proto file/service/method descriptors as schema
  // metadata on the gRPC descriptors (used by reflection and debug tooling).
  private abstract static class HomepageServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    HomepageServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.shopping.merchant.accounts.v1beta.HomepageProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("HomepageService");
    }
  }

  private static final class HomepageServiceFileDescriptorSupplier
      extends HomepageServiceBaseDescriptorSupplier {
    HomepageServiceFileDescriptorSupplier() {}
  }

  private static final class HomepageServiceMethodDescriptorSupplier
      extends HomepageServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    HomepageServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  // Lazily builds the aggregate service descriptor (double-checked locking),
  // registering all four unary methods exactly once per process.
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (HomepageServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new HomepageServiceFileDescriptorSupplier())
                      .addMethod(getGetHomepageMethod())
                      .addMethod(getUpdateHomepageMethod())
                      .addMethod(getClaimHomepageMethod())
                      .addMethod(getUnclaimHomepageMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
google/ExoPlayer | 37,235 | library/core/src/test/java/com/google/android/exoplayer2/drm/DefaultDrmSessionManagerTest.java | /*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.drm;
import static com.google.android.exoplayer2.util.Assertions.checkNotNull;
import static com.google.common.truth.Truth.assertThat;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.Assert.assertThrows;
import android.os.Looper;
import androidx.annotation.Nullable;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.analytics.PlayerId;
import com.google.android.exoplayer2.drm.DrmSessionManager.DrmSessionReference;
import com.google.android.exoplayer2.drm.ExoMediaDrm.AppManagedProvider;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.testutil.FakeExoMediaDrm;
import com.google.android.exoplayer2.testutil.TestUtil;
import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.shadows.ShadowLooper;
/** Tests for {@link DefaultDrmSessionManager} and {@link DefaultDrmSession}. */
// TODO: Test more branches:
// - Different sources for licenseServerUrl.
// - Multiple acquisitions & releases for same keys -> multiple requests.
// - Key denial.
@RunWith(AndroidJUnit4.class)
public class DefaultDrmSessionManagerTest {
// Arbitrary DRM scheme UUID shared by all tests in this class.
private static final UUID DRM_SCHEME_UUID =
    UUID.nameUUIDFromBytes(TestUtil.createByteArray(7, 8, 9));
// A single scheme-data entry keyed to DRM_SCHEME_UUID; the byte payload identifies the "keys".
private static final ImmutableList<DrmInitData.SchemeData> DRM_SCHEME_DATAS =
    ImmutableList.of(
        new DrmInitData.SchemeData(
            DRM_SCHEME_UUID, MimeTypes.VIDEO_MP4, /* data= */ TestUtil.createByteArray(1, 2, 3)));
// Format carrying DrmInitData built from DRM_SCHEME_DATAS, used to acquire sessions.
private static final Format FORMAT_WITH_DRM_INIT_DATA =
    new Format.Builder().setDrmInitData(new DrmInitData(DRM_SCHEME_DATAS)).build();
/** Acquiring a session triggers a key load and the session ends in OPENED_WITH_KEYS. */
@Test(timeout = 10_000)
public void acquireSession_triggersKeyLoadAndSessionIsOpened() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  assertThat(drmSession.queryKeyStatus())
      .containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
}
/** With keepalive enabled, a released session stays open until the keepalive timer expires. */
@Test(timeout = 10_000)
public void keepaliveEnabled_sessionsKeptForRequestedTime() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .setSessionKeepaliveMs(10_000)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  drmSession.release(/* eventDispatcher= */ null);
  // Still open: the manager holds an internal keepalive reference.
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  // Advance the Robolectric clock past the keepalive window; the timer then releases the session.
  ShadowLooper.idleMainLooper(10, SECONDS);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
/** With keepalive disabled, releasing the last reference releases the session immediately. */
@Test(timeout = 10_000)
public void keepaliveDisabled_sessionsReleasedImmediately() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .setSessionKeepaliveMs(C.TIME_UNSET)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  drmSession.release(/* eventDispatcher= */ null);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
/** Releasing the manager releases keepalive-held sessions without waiting for the timer. */
@Test(timeout = 10_000)
public void managerRelease_allKeepaliveSessionsImmediatelyReleased() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .setSessionKeepaliveMs(10_000)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  drmSession.release(/* eventDispatcher= */ null);
  // Kept alive by the manager's internal keepalive reference.
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  drmSessionManager.release();
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
/** With keepalive disabled the manager holds no session references, so releasing it is a no-op for sessions. */
@Test(timeout = 10_000)
public void managerRelease_keepaliveDisabled_doesntReleaseAnySessions() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .setSessionKeepaliveMs(C.TIME_UNSET)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  // Release the manager, the session should still be open (though it's unusable because
  // the underlying ExoMediaDrm is released).
  drmSessionManager.release();
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
}
/**
 * The ExoMediaDrm reference held by the manager must survive until the last active session is
 * released, even across release/re-prepare cycles of the manager itself.
 */
@Test(timeout = 10_000)
public void managerRelease_mediaDrmNotReleasedUntilLastSessionReleased() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm();
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
          .setSessionKeepaliveMs(10_000)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  drmSessionManager.release();
  // The manager is now in a 'releasing' state because the session is still active - so the
  // ExoMediaDrm instance should still be active (with 1 reference held by this test, and 1 held
  // by the manager).
  assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(2);
  // And re-preparing the session shouldn't acquire another reference.
  drmSessionManager.prepare();
  assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(2);
  drmSessionManager.release();
  drmSession.release(/* eventDispatcher= */ null);
  // The final session has been released, so now the ExoMediaDrm should be released too.
  assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(1);
  // Re-preparing the fully released manager should now acquire another ExoMediaDrm reference.
  drmSessionManager.prepare();
  assertThat(exoMediaDrm.getReferenceCount()).isEqualTo(2);
  drmSessionManager.release();
  exoMediaDrm.release();
}
// Regression test for https://github.com/google/ExoPlayer/issues/9193: the manager must not
// release its keepalive reference twice (once on manager release, once when the timer fires).
@Test(timeout = 10_000)
public void
    managerReleasedBeforeSession_keepaliveEnabled_managerOnlyReleasesOneKeepaliveReference()
        throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm.Builder().build();
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
          .setSessionKeepaliveMs(10_000)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  // Release the manager (there's still an explicit reference to the session from acquireSession).
  // This should immediately release the manager's internal keepalive session reference.
  drmSessionManager.release();
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  // Ensure the manager doesn't release a *second* keepalive session reference after the timer
  // expires.
  ShadowLooper.idleMainLooper(10, SECONDS);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  // Release the explicit session reference.
  drmSession.release(/* eventDispatcher= */ null);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
/**
 * When the underlying ExoMediaDrm hits its concurrent-session limit, the manager eagerly releases
 * keepalive-only sessions to make room for a new acquisition.
 */
@Test(timeout = 10_000)
public void maxConcurrentSessionsExceeded_allKeepAliveSessionsEagerlyReleased() throws Exception {
  // A second, distinct init data so the manager must open a second underlying session.
  ImmutableList<DrmInitData.SchemeData> secondSchemeDatas =
      ImmutableList.of(DRM_SCHEME_DATAS.get(0).copyWithData(TestUtil.createByteArray(4, 5, 6)));
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS, secondSchemeDatas);
  Format secondFormatWithDrmInitData =
      new Format.Builder().setDrmInitData(new DrmInitData(secondSchemeDatas)).build();
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm(/* maxConcurrentSessions= */ 1))
          .setSessionKeepaliveMs(10_000)
          .setMultiSession(true)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession firstDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(firstDrmSession);
  firstDrmSession.release(/* eventDispatcher= */ null);
  // All external references to firstDrmSession have been released, it's being kept alive by
  // drmSessionManager's internal reference.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  DrmSession secondDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, secondFormatWithDrmInitData));
  // The drmSessionManager had to release firstDrmSession in order to acquire secondDrmSession.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
  waitForOpenedWithKeys(secondDrmSession);
  assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
}
/**
 * As above, but the first session is also held by a pre-acquired reference; the manager must
 * release both its keepalive reference and the pre-acquired one to free the slot.
 */
@Test(timeout = 10_000)
public void maxConcurrentSessionsExceeded_allPreacquiredAndKeepaliveSessionsEagerlyReleased()
    throws Exception {
  ImmutableList<DrmInitData.SchemeData> secondSchemeDatas =
      ImmutableList.of(DRM_SCHEME_DATAS.get(0).copyWithData(TestUtil.createByteArray(4, 5, 6)));
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS, secondSchemeDatas);
  Format secondFormatWithDrmInitData =
      new Format.Builder().setDrmInitData(new DrmInitData(secondSchemeDatas)).build();
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID,
              uuid -> new FakeExoMediaDrm.Builder().setMaxConcurrentSessions(1).build())
          .setSessionKeepaliveMs(10_000)
          .setMultiSession(true)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSessionReference firstDrmSessionReference =
      checkNotNull(
          drmSessionManager.preacquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  DrmSession firstDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(firstDrmSession);
  firstDrmSession.release(/* eventDispatcher= */ null);
  // The direct reference to firstDrmSession has been released, it's being kept alive by both
  // firstDrmSessionReference and drmSessionManager's internal reference.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  DrmSession secondDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, secondFormatWithDrmInitData));
  // The drmSessionManager had to release both its internal keep-alive reference and the
  // reference represented by firstDrmSessionReference in order to acquire secondDrmSession.
  assertThat(firstDrmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
  waitForOpenedWithKeys(secondDrmSession);
  assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  // Not needed (because the manager has already released this reference) but we call it anyway
  // for completeness.
  firstDrmSessionReference.release();
  // Clean-up
  secondDrmSession.release(/* eventDispatcher= */ null);
  drmSessionManager.release();
}
/** Re-acquiring a session inside the keepalive window cancels the pending timeout. */
@Test(timeout = 10_000)
public void sessionReacquired_keepaliveTimeOutCancelled() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .setSessionKeepaliveMs(10_000)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession firstDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(firstDrmSession);
  firstDrmSession.release(/* eventDispatcher= */ null);
  ShadowLooper.idleMainLooper(5, SECONDS);
  // Acquire a session for the same init data 5s in to the 10s timeout (so expect the same
  // instance).
  DrmSession secondDrmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  assertThat(secondDrmSession).isSameInstanceAs(firstDrmSession);
  // Let the timeout definitely expire, and check the session didn't get released.
  ShadowLooper.idleMainLooper(10, SECONDS);
  assertThat(secondDrmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
}
/**
 * A pre-acquired session loads its keys in the background, so a later full acquisition finds the
 * session already in OPENED_WITH_KEYS without a second key load.
 */
@Test(timeout = 10_000)
public void preacquireSession_loadsKeysBeforeFullAcquisition() throws Exception {
  // Counts onDrmKeysLoaded callbacks so we can detect the background key load completing.
  AtomicInteger keyLoadCount = new AtomicInteger(0);
  DrmSessionEventListener.EventDispatcher eventDispatcher =
      new DrmSessionEventListener.EventDispatcher();
  eventDispatcher.addEventListener(
      Util.createHandlerForCurrentLooper(),
      new DrmSessionEventListener() {
        @Override
        public void onDrmKeysLoaded(
            int windowIndex, @Nullable MediaSource.MediaPeriodId mediaPeriodId) {
          keyLoadCount.incrementAndGet();
        }
      });
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          // Disable keepalive
          .setSessionKeepaliveMs(C.TIME_UNSET)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSessionReference sessionReference =
      drmSessionManager.preacquireSession(eventDispatcher, FORMAT_WITH_DRM_INIT_DATA);
  // Wait for the key load event to propagate, indicating the pre-acquired session is in
  // STATE_OPENED_WITH_KEYS.
  while (keyLoadCount.get() == 0) {
    // Allow the key response to be handled.
    ShadowLooper.idleMainLooper();
  }
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  // Without idling the main/playback looper, we assert the session is already in OPENED_WITH_KEYS
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  assertThat(keyLoadCount.get()).isEqualTo(1);
  // After releasing our concrete session reference, the session is held open by the pre-acquired
  // reference.
  drmSession.release(/* eventDispatcher= */ null);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  // Releasing the pre-acquired reference allows the session to be fully released.
  sessionReference.release();
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
/**
 * Releasing a pre-acquired reference before the underlying session is constructed must still
 * release the session once construction completes (no leak, no dangling reference).
 */
@Test(timeout = 10_000)
public void
    preacquireSession_releaseBeforeUnderlyingAcquisitionCompletesReleasesSessionOnceAcquired()
        throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          // Disable keepalive
          .setSessionKeepaliveMs(C.TIME_UNSET)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSessionReference sessionReference =
      drmSessionManager.preacquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA);
  // Release the pre-acquired reference before the underlying session has had a chance to be
  // constructed.
  sessionReference.release();
  // Acquiring the same session triggers a second key load (because the pre-acquired session was
  // fully released).
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
  waitForOpenedWithKeys(drmSession);
  drmSession.release(/* eventDispatcher= */ null);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
}
/**
 * Releasing the whole manager before a pre-acquired session is constructed must cancel the
 * pending acquisition entirely.
 */
@Test(timeout = 10_000)
public void preacquireSession_releaseManagerBeforeAcquisition_acquisitionDoesntHappen()
    throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          // Disable keepalive
          .setSessionKeepaliveMs(C.TIME_UNSET)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSessionReference sessionReference =
      drmSessionManager.preacquireSession(/* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA);
  // Release the manager before the underlying session has had a chance to be constructed. This
  // will release all pre-acquired sessions.
  drmSessionManager.release();
  // Allow the acquisition event to be handled on the main/playback thread.
  ShadowLooper.idleMainLooper();
  // Re-prepare the manager so we can fully acquire the same session, and check the previous
  // pre-acquisition didn't do anything.
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
  waitForOpenedWithKeys(drmSession);
  drmSession.release(/* eventDispatcher= */ null);
  // If the (still unreleased) pre-acquired session above was linked to the same underlying
  // session then the state would still be OPENED_WITH_KEYS.
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_RELEASED);
  // Release the pre-acquired session from above (this is a no-op, but we do it anyway for
  // correctness).
  sessionReference.release();
  drmSessionManager.release();
}
/** An EVENT_KEY_REQUIRED from the ExoMediaDrm triggers a second key request for the same keys. */
@Test(timeout = 10_000)
public void keyRefreshEvent_triggersKeyRefresh() throws Exception {
  FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm();
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DefaultDrmSession drmSession =
      (DefaultDrmSession)
          checkNotNull(
              drmSessionManager.acquireSession(
                  /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(1);
  exoMediaDrm.triggerEvent(
      drmSession::hasSessionId,
      ExoMediaDrm.EVENT_KEY_REQUIRED,
      /* extra= */ 0,
      /* data= */ Util.EMPTY_BYTE_ARRAY);
  while (licenseServer.getReceivedSchemeDatas().size() == 1) {
    // Allow the key refresh event to be handled.
    ShadowLooper.idleMainLooper();
  }
  // Two requests were made, both for the same scheme data.
  assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(2);
  assertThat(ImmutableSet.copyOf(licenseServer.getReceivedSchemeDatas())).hasSize(1);
  drmSession.release(/* eventDispatcher= */ null);
  drmSessionManager.release();
  exoMediaDrm.release();
}
/** Key refresh must still work while the manager is in its 'releasing' state. */
@Test(timeout = 10_000)
public void keyRefreshEvent_whileManagerIsReleasing_triggersKeyRefresh() throws Exception {
  FakeExoMediaDrm exoMediaDrm = new FakeExoMediaDrm();
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(exoMediaDrm))
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DefaultDrmSession drmSession =
      (DefaultDrmSession)
          checkNotNull(
              drmSessionManager.acquireSession(
                  /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  waitForOpenedWithKeys(drmSession);
  assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(1);
  // The still-active session keeps the manager in a 'releasing' state.
  drmSessionManager.release();
  exoMediaDrm.triggerEvent(
      drmSession::hasSessionId,
      ExoMediaDrm.EVENT_KEY_REQUIRED,
      /* extra= */ 0,
      /* data= */ Util.EMPTY_BYTE_ARRAY);
  while (licenseServer.getReceivedSchemeDatas().size() == 1) {
    // Allow the key refresh event to be handled.
    ShadowLooper.idleMainLooper();
  }
  assertThat(licenseServer.getReceivedSchemeDatas()).hasSize(2);
  assertThat(ImmutableSet.copyOf(licenseServer.getReceivedSchemeDatas())).hasSize(1);
  drmSession.release(/* eventDispatcher= */ null);
  exoMediaDrm.release();
}
/** NotProvisionedException from openSession triggers provisioning and a retried open. */
@Test
public void
    deviceNotProvisioned_exceptionThrownFromOpenSession_provisioningDoneAndOpenSessionRetried() {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID,
              uuid -> new FakeExoMediaDrm.Builder().setProvisionsRequired(1).build())
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  // Confirm that opening the session threw NotProvisionedException (otherwise state would be
  // OPENED)
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
  waitForOpenedWithKeys(drmSession);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  assertThat(drmSession.queryKeyStatus())
      .containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
  assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(1);
}
/** NotProvisionedException from getKeyRequest (after a successful open) also triggers provisioning. */
@Test
public void
    deviceNotProvisioned_exceptionThrownFromGetKeyRequest_provisioningDoneAndOpenSessionRetried() {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID,
              uuid ->
                  new FakeExoMediaDrm.Builder()
                      .setProvisionsRequired(1)
                      .throwNotProvisionedExceptionFromGetKeyRequest()
                      .build())
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  // openSession succeeded; the provisioning exception comes from getKeyRequest instead.
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
  waitForOpenedWithKeys(drmSession);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  assertThat(drmSession.queryKeyStatus())
      .containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
  assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(1);
}
/** A device that needs two rounds of provisioning is handled with two provision requests. */
@Test
public void deviceNotProvisioned_doubleProvisioningHandledAndOpenSessionRetried() {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID,
              uuid -> new FakeExoMediaDrm.Builder().setProvisionsRequired(2).build())
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  // Confirm that opening the session threw NotProvisionedException (otherwise state would be
  // OPENED)
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
  waitForOpenedWithKeys(drmSession);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  assertThat(drmSession.queryKeyStatus())
      .containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
  assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(2);
}
/** A key response indicating 'provisioning required' triggers provisioning mid key-load. */
@Test
public void keyResponseIndicatesProvisioningRequired_provisioningDone() {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.requiringProvisioningThenAllowingSchemeDatas(
          DRM_SCHEME_DATAS);
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(
              DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm.Builder().build())
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED);
  waitForOpenedWithKeys(drmSession);
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENED_WITH_KEYS);
  assertThat(drmSession.queryKeyStatus())
      .containsExactly(FakeExoMediaDrm.KEY_STATUS_KEY, FakeExoMediaDrm.KEY_STATUS_AVAILABLE);
  assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(1);
}
/** If provisioning is reset while the manager is active, the device is re-provisioned on the next acquisition. */
@Test
public void provisioningUndoneWhileManagerIsActive_deviceReprovisioned() {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  // Each provisioning round needs two provision requests (setProvisionsRequired(2)).
  FakeExoMediaDrm mediaDrm = new FakeExoMediaDrm.Builder().setProvisionsRequired(2).build();
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, new AppManagedProvider(mediaDrm))
          .setSessionKeepaliveMs(C.TIME_UNSET)
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  // Confirm that opening the session threw NotProvisionedException (otherwise state would be
  // OPENED)
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
  waitForOpenedWithKeys(drmSession);
  drmSession.release(/* eventDispatcher= */ null);
  mediaDrm.resetProvisioning();
  drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  // Confirm that opening the session threw NotProvisionedException (otherwise state would be
  // OPENED)
  assertThat(drmSession.getState()).isEqualTo(DrmSession.STATE_OPENING);
  waitForOpenedWithKeys(drmSession);
  // 2 provision requests per round x 2 rounds.
  assertThat(licenseServer.getReceivedProvisionRequests()).hasSize(4);
}
/** Acquiring (or pre-acquiring) from an unprepared manager must fail. */
@Test
public void managerNotPrepared_acquireSessionAndPreacquireSessionFail() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .build(/* mediaDrmCallback= */ licenseServer);
  // Note: prepare() is deliberately never called.
  assertThrows(
      Exception.class,
      () ->
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  assertThrows(
      Exception.class,
      () ->
          drmSessionManager.preacquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
}
/** Acquiring (or pre-acquiring) from a manager in the 'releasing' state must also fail. */
@Test
public void managerReleasing_acquireSessionAndPreacquireSessionFail() throws Exception {
  FakeExoMediaDrm.LicenseServer licenseServer =
      FakeExoMediaDrm.LicenseServer.allowingSchemeDatas(DRM_SCHEME_DATAS);
  DefaultDrmSessionManager drmSessionManager =
      new DefaultDrmSessionManager.Builder()
          .setUuidAndExoMediaDrmProvider(DRM_SCHEME_UUID, uuid -> new FakeExoMediaDrm())
          .build(/* mediaDrmCallback= */ licenseServer);
  drmSessionManager.setPlayer(/* playbackLooper= */ Looper.myLooper(), PlayerId.UNSET);
  drmSessionManager.prepare();
  DrmSession drmSession =
      checkNotNull(
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  drmSessionManager.release();
  // The manager's prepareCount is now zero, but the drmSession is keeping it in a 'releasing'
  // state. acquireSession and preacquireSession should still fail.
  assertThrows(
      Exception.class,
      () ->
          drmSessionManager.acquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  assertThrows(
      Exception.class,
      () ->
          drmSessionManager.preacquireSession(
              /* eventDispatcher= */ null, FORMAT_WITH_DRM_INIT_DATA));
  drmSession.release(/* eventDispatcher= */ null);
}
private static void waitForOpenedWithKeys(DrmSession drmSession) {
while (drmSession.getState() != DrmSession.STATE_OPENED_WITH_KEYS) {
// Check the error first, so we get a meaningful failure if there's been an error.
assertThat(drmSession.getError()).isNull();
assertThat(drmSession.getState()).isAnyOf(DrmSession.STATE_OPENING, DrmSession.STATE_OPENED);
// Allow the key response to be handled.
ShadowLooper.idleMainLooper();
}
}
}
|
googleapis/google-cloud-java | 36,850 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ListTableDataProfilesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* List of profiles generated for a given organization or project.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListTableDataProfilesResponse}
*/
public final class ListTableDataProfilesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ListTableDataProfilesResponse)
ListTableDataProfilesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListTableDataProfilesResponse.newBuilder() to construct.
private ListTableDataProfilesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListTableDataProfilesResponse() {
tableDataProfiles_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListTableDataProfilesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListTableDataProfilesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListTableDataProfilesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.ListTableDataProfilesResponse.class,
com.google.privacy.dlp.v2.ListTableDataProfilesResponse.Builder.class);
}
public static final int TABLE_DATA_PROFILES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.privacy.dlp.v2.TableDataProfile> tableDataProfiles_;
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.privacy.dlp.v2.TableDataProfile> getTableDataProfilesList() {
return tableDataProfiles_;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.privacy.dlp.v2.TableDataProfileOrBuilder>
getTableDataProfilesOrBuilderList() {
return tableDataProfiles_;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
@java.lang.Override
public int getTableDataProfilesCount() {
return tableDataProfiles_.size();
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
@java.lang.Override
public com.google.privacy.dlp.v2.TableDataProfile getTableDataProfiles(int index) {
return tableDataProfiles_.get(index);
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
@java.lang.Override
public com.google.privacy.dlp.v2.TableDataProfileOrBuilder getTableDataProfilesOrBuilder(
int index) {
return tableDataProfiles_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < tableDataProfiles_.size(); i++) {
output.writeMessage(1, tableDataProfiles_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < tableDataProfiles_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(1, tableDataProfiles_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.privacy.dlp.v2.ListTableDataProfilesResponse)) {
return super.equals(obj);
}
com.google.privacy.dlp.v2.ListTableDataProfilesResponse other =
(com.google.privacy.dlp.v2.ListTableDataProfilesResponse) obj;
if (!getTableDataProfilesList().equals(other.getTableDataProfilesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getTableDataProfilesCount() > 0) {
hash = (37 * hash) + TABLE_DATA_PROFILES_FIELD_NUMBER;
hash = (53 * hash) + getTableDataProfilesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.privacy.dlp.v2.ListTableDataProfilesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* List of profiles generated for a given organization or project.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.ListTableDataProfilesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ListTableDataProfilesResponse)
com.google.privacy.dlp.v2.ListTableDataProfilesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListTableDataProfilesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListTableDataProfilesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.ListTableDataProfilesResponse.class,
com.google.privacy.dlp.v2.ListTableDataProfilesResponse.Builder.class);
}
// Construct using com.google.privacy.dlp.v2.ListTableDataProfilesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (tableDataProfilesBuilder_ == null) {
tableDataProfiles_ = java.util.Collections.emptyList();
} else {
tableDataProfiles_ = null;
tableDataProfilesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_ListTableDataProfilesResponse_descriptor;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListTableDataProfilesResponse getDefaultInstanceForType() {
return com.google.privacy.dlp.v2.ListTableDataProfilesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListTableDataProfilesResponse build() {
com.google.privacy.dlp.v2.ListTableDataProfilesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListTableDataProfilesResponse buildPartial() {
com.google.privacy.dlp.v2.ListTableDataProfilesResponse result =
new com.google.privacy.dlp.v2.ListTableDataProfilesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.privacy.dlp.v2.ListTableDataProfilesResponse result) {
if (tableDataProfilesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
tableDataProfiles_ = java.util.Collections.unmodifiableList(tableDataProfiles_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tableDataProfiles_ = tableDataProfiles_;
} else {
result.tableDataProfiles_ = tableDataProfilesBuilder_.build();
}
}
private void buildPartial0(com.google.privacy.dlp.v2.ListTableDataProfilesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.privacy.dlp.v2.ListTableDataProfilesResponse) {
return mergeFrom((com.google.privacy.dlp.v2.ListTableDataProfilesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.privacy.dlp.v2.ListTableDataProfilesResponse other) {
if (other == com.google.privacy.dlp.v2.ListTableDataProfilesResponse.getDefaultInstance())
return this;
if (tableDataProfilesBuilder_ == null) {
if (!other.tableDataProfiles_.isEmpty()) {
if (tableDataProfiles_.isEmpty()) {
tableDataProfiles_ = other.tableDataProfiles_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTableDataProfilesIsMutable();
tableDataProfiles_.addAll(other.tableDataProfiles_);
}
onChanged();
}
} else {
if (!other.tableDataProfiles_.isEmpty()) {
if (tableDataProfilesBuilder_.isEmpty()) {
tableDataProfilesBuilder_.dispose();
tableDataProfilesBuilder_ = null;
tableDataProfiles_ = other.tableDataProfiles_;
bitField0_ = (bitField0_ & ~0x00000001);
tableDataProfilesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getTableDataProfilesFieldBuilder()
: null;
} else {
tableDataProfilesBuilder_.addAllMessages(other.tableDataProfiles_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.privacy.dlp.v2.TableDataProfile m =
input.readMessage(
com.google.privacy.dlp.v2.TableDataProfile.parser(), extensionRegistry);
if (tableDataProfilesBuilder_ == null) {
ensureTableDataProfilesIsMutable();
tableDataProfiles_.add(m);
} else {
tableDataProfilesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.privacy.dlp.v2.TableDataProfile> tableDataProfiles_ =
java.util.Collections.emptyList();
private void ensureTableDataProfilesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
tableDataProfiles_ =
new java.util.ArrayList<com.google.privacy.dlp.v2.TableDataProfile>(tableDataProfiles_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.privacy.dlp.v2.TableDataProfile,
com.google.privacy.dlp.v2.TableDataProfile.Builder,
com.google.privacy.dlp.v2.TableDataProfileOrBuilder>
tableDataProfilesBuilder_;
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public java.util.List<com.google.privacy.dlp.v2.TableDataProfile> getTableDataProfilesList() {
if (tableDataProfilesBuilder_ == null) {
return java.util.Collections.unmodifiableList(tableDataProfiles_);
} else {
return tableDataProfilesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public int getTableDataProfilesCount() {
if (tableDataProfilesBuilder_ == null) {
return tableDataProfiles_.size();
} else {
return tableDataProfilesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public com.google.privacy.dlp.v2.TableDataProfile getTableDataProfiles(int index) {
if (tableDataProfilesBuilder_ == null) {
return tableDataProfiles_.get(index);
} else {
return tableDataProfilesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder setTableDataProfiles(
int index, com.google.privacy.dlp.v2.TableDataProfile value) {
if (tableDataProfilesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableDataProfilesIsMutable();
tableDataProfiles_.set(index, value);
onChanged();
} else {
tableDataProfilesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder setTableDataProfiles(
int index, com.google.privacy.dlp.v2.TableDataProfile.Builder builderForValue) {
if (tableDataProfilesBuilder_ == null) {
ensureTableDataProfilesIsMutable();
tableDataProfiles_.set(index, builderForValue.build());
onChanged();
} else {
tableDataProfilesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder addTableDataProfiles(com.google.privacy.dlp.v2.TableDataProfile value) {
if (tableDataProfilesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableDataProfilesIsMutable();
tableDataProfiles_.add(value);
onChanged();
} else {
tableDataProfilesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder addTableDataProfiles(
int index, com.google.privacy.dlp.v2.TableDataProfile value) {
if (tableDataProfilesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableDataProfilesIsMutable();
tableDataProfiles_.add(index, value);
onChanged();
} else {
tableDataProfilesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder addTableDataProfiles(
com.google.privacy.dlp.v2.TableDataProfile.Builder builderForValue) {
if (tableDataProfilesBuilder_ == null) {
ensureTableDataProfilesIsMutable();
tableDataProfiles_.add(builderForValue.build());
onChanged();
} else {
tableDataProfilesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder addTableDataProfiles(
int index, com.google.privacy.dlp.v2.TableDataProfile.Builder builderForValue) {
if (tableDataProfilesBuilder_ == null) {
ensureTableDataProfilesIsMutable();
tableDataProfiles_.add(index, builderForValue.build());
onChanged();
} else {
tableDataProfilesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder addAllTableDataProfiles(
java.lang.Iterable<? extends com.google.privacy.dlp.v2.TableDataProfile> values) {
if (tableDataProfilesBuilder_ == null) {
ensureTableDataProfilesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tableDataProfiles_);
onChanged();
} else {
tableDataProfilesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder clearTableDataProfiles() {
if (tableDataProfilesBuilder_ == null) {
tableDataProfiles_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
tableDataProfilesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public Builder removeTableDataProfiles(int index) {
if (tableDataProfilesBuilder_ == null) {
ensureTableDataProfilesIsMutable();
tableDataProfiles_.remove(index);
onChanged();
} else {
tableDataProfilesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public com.google.privacy.dlp.v2.TableDataProfile.Builder getTableDataProfilesBuilder(
int index) {
return getTableDataProfilesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public com.google.privacy.dlp.v2.TableDataProfileOrBuilder getTableDataProfilesOrBuilder(
int index) {
if (tableDataProfilesBuilder_ == null) {
return tableDataProfiles_.get(index);
} else {
return tableDataProfilesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public java.util.List<? extends com.google.privacy.dlp.v2.TableDataProfileOrBuilder>
getTableDataProfilesOrBuilderList() {
if (tableDataProfilesBuilder_ != null) {
return tableDataProfilesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(tableDataProfiles_);
}
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public com.google.privacy.dlp.v2.TableDataProfile.Builder addTableDataProfilesBuilder() {
return getTableDataProfilesFieldBuilder()
.addBuilder(com.google.privacy.dlp.v2.TableDataProfile.getDefaultInstance());
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public com.google.privacy.dlp.v2.TableDataProfile.Builder addTableDataProfilesBuilder(
int index) {
return getTableDataProfilesFieldBuilder()
.addBuilder(index, com.google.privacy.dlp.v2.TableDataProfile.getDefaultInstance());
}
/**
*
*
* <pre>
* List of data profiles.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TableDataProfile table_data_profiles = 1;</code>
*/
public java.util.List<com.google.privacy.dlp.v2.TableDataProfile.Builder>
getTableDataProfilesBuilderList() {
return getTableDataProfilesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.privacy.dlp.v2.TableDataProfile,
com.google.privacy.dlp.v2.TableDataProfile.Builder,
com.google.privacy.dlp.v2.TableDataProfileOrBuilder>
getTableDataProfilesFieldBuilder() {
if (tableDataProfilesBuilder_ == null) {
tableDataProfilesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.privacy.dlp.v2.TableDataProfile,
com.google.privacy.dlp.v2.TableDataProfile.Builder,
com.google.privacy.dlp.v2.TableDataProfileOrBuilder>(
tableDataProfiles_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
tableDataProfiles_ = null;
}
return tableDataProfilesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The next page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ListTableDataProfilesResponse)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListTableDataProfilesResponse)
private static final com.google.privacy.dlp.v2.ListTableDataProfilesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ListTableDataProfilesResponse();
}
public static com.google.privacy.dlp.v2.ListTableDataProfilesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListTableDataProfilesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListTableDataProfilesResponse>() {
@java.lang.Override
public ListTableDataProfilesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListTableDataProfilesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListTableDataProfilesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.privacy.dlp.v2.ListTableDataProfilesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.